// Configure and start an Ignite thin client pointed at the local node.
var clientConfig = new IgniteClientConfiguration { Host = "127.0.0.1" };

using (IIgniteClient client = Ignition.StartClient(clientConfig))
{
    ICacheClient<int, Organization> orgCache = client.GetCache<int, Organization>(CacheName);

    // NOTE(review): the address string below lost its Cyrillic text during article
    // extraction; it is reproduced byte-for-byte to preserve runtime behavior.
    var org = new Organization(
        "GridGain",
        new Address(". -, . , . 69–71, ", 191119),
        new Email("rusales@gridgain.com"),
        OrganizationType.Private,
        DateTime.Now
    );

    // Store the entry in the cache.
    orgCache.Put(1, org);

    // Read it back through the thin client.
    Organization orgFromCache = orgCache.Get(1);
}
-- Disable WAL (write-ahead log) for the table, e.g. to speed up bulk loading.
ALTER TABLE my_table NOLOGGING;
-- Re-enable WAL once loading is finished, so updates are durable again.
ALTER TABLE my_table LOGGING;
ignite.cluster().isWalEnabled(cacheName); // Check whether WAL is currently enabled for the cache.
ignite.cluster().enableWal(cacheName); // Enable WAL for the cache.
ignite.cluster().disableWal(cacheName); // Disable WAL for the cache.
// INLINE_SIZE — how many bytes of the indexed value to store inline in the index page;
// PARALLEL — degree of parallelism used while building the index.
CREATE INDEX fast_city_idx ON sales (country, city) INLINE_SIZE 60 PARALLEL 8;
// . int samplesCnt = 100000; // sin^2 [0; pi/2]. IgniteSupplier<Double> pointsGen = () -> (Math.random() + 1) / 2 * (Math.PI / 2); IgniteDoubleFunction<Double> f = x -> Math.sin(x) * Math.sin(x); IgniteCache<Integer, LabeledVector<Vector, Vector>> cache = LabeledVectorsCache.createNew(ignite); String cacheName = cache.getName(); // IgniteDataStreamer. try (IgniteDataStreamer<Integer, LabeledVector<Vector, Vector>> streamer = ignite.dataStreamer(cacheName)) { streamer.perNodeBufferSize(10000); for (int i = 0; i < samplesCnt; i++) { double x = pointsGen.get(); double y = f.apply(x); streamer.addData(i, new LabeledVector<>(new DenseLocalOnHeapVector(new double[] {x}), new DenseLocalOnHeapVector(new double[] {y}))); } } // . MLPGroupUpdateTrainer<RPropParameterUpdate> trainer = MLPGroupUpdateTrainer.getDefault(ignite). withSyncPeriod(3). withTolerance(0.0001). withMaxGlobalSteps(100). withUpdateStrategy(UpdateStrategies.RProp()); // . MLPArchitecture conf = new MLPArchitecture(1). withAddedLayer(10, true, Activators.SIGMOID). withAddedLayer(1, true, Activators.SIGMOID); MLPGroupUpdateTrainerCacheInput trainerInput = new MLPGroupUpdateTrainerCacheInput(conf, new RandomInitializer(new Random()), 6, cache, 1000); // . MultilayerPerceptron mlp = trainer.train(trainerInput); int testCnt = 1000; Matrix test = new DenseLocalOnHeapMatrix(1, testCnt); for (int i = 0; i < testCnt; i++) test.setColumn(i, new double[] {pointsGen.get()}); Matrix predicted = mlp.apply(test); Matrix actual = test.copy().map(f); Vector predicted = mlp.apply(test).getRow(0); Vector actual = test.copy().map(f).getRow(0); // . Tracer.showAscii(predicted); Tracer.showAscii(actual); System.out.println("MSE: " + (predicted.minus(actual).kNorm(2) / predicted.size()));
Source: https://habr.com/ru/post/351098/
All Articles