Mapping with JPA Annotations in Grails 3.x

I have a problem using a JPA-annotated class in Grails (tried on grails-3.1.11 and grails-3.2.0) in my test application.
I followed the description in the Grails documentation (mappingWithHibernateAnnotation) and several Stack Overflow answers to similar problems (question 1, question 2 and question 3), but no luck.
Here is a description of what I do (the same on both versions of Grails):
1. Create a new Grails project (I'm using IntelliJ IDEA 2016.2.4).
2. Set up a datasource in application.yml:
dataSource:
    configClass: org.codehaus.groovy.grails.orm.hibernate.cfg.GrailsAnnotationConfiguration.class
    pooled: true
    jmxExport: true
    dialect: org.hibernate.dialect.PostgreSQLDialect
    driverClassName: org.postgresql.Driver
    username: postgres
    password: masterkey
3. Add the PostgreSQL JDBC dependency to build.gradle:
compile group: 'org.postgresql', name: 'postgresql', version: '9.4-1200-jdbc41'
4. Create a new class DictionaryEntity.groovy in src/main/groovy:
package persistence.postgresql.mapping

import javax.persistence.*;

@Entity(name = "persistence.postgresql.mapping.DictionaryEntity")
@Table(name = "dictionary")
public class DictionaryEntity implements Serializable {

    private int id;
    private int word;
    private int language;
    private String txt;

    @SequenceGenerator(name = "dictionary_id_seq_gen", sequenceName = "dictionary_id_seq", allocationSize = 1)
    @Id
    @GeneratedValue(generator = "dictionary_id_seq_gen", strategy = GenerationType.SEQUENCE)
    @Column(name = "id", nullable = false)
    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    @Column(name = "word", nullable = false)
    public int getWord() {
        return word;
    }

    public void setWord(int word) {
        this.word = word;
    }

    @Column(name = "language", nullable = false)
    public int getLanguage() {
        return language;
    }

    public void setLanguage(int language) {
        this.language = language;
    }

    @Column(name = "txt", length = 128)
    public String getTxt() {
        return txt;
    }

    public void setTxt(String txt) {
        this.txt = txt;
    }

    public DictionaryEntity() {
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        DictionaryEntity that = (DictionaryEntity) o;
        if (id != that.id) return false;
        if (word != that.word) return false;
        if (language != that.language) return false;
        if (txt != null ? !txt.equals(that.txt) : that.txt != null) return false;
        return true;
    }

    @Override
    public int hashCode() {
        int result = id;
        result = 31 * result + word;
        result = 31 * result + language;
        result = 31 * result + (txt != null ? txt.hashCode() : 0);
        return result;
    }
}
5. Create hibernate.cfg.xml in the grails-app/conf directory (I tried grails-app/conf/hibernate as well):
<?xml version='1.0' encoding='UTF-8'?>
<!DOCTYPE hibernate-configuration PUBLIC
        "-//Hibernate/Hibernate Configuration DTD 3.0//EN"
        "http://hibernate.sourceforge.net/hibernate-configuration-3.0.dtd">
<hibernate-configuration>
    <session-factory>
        <mapping class="persistence.postgresql.mapping.DictionaryEntity"/>
    </session-factory>
</hibernate-configuration>
6. Create a new DictionaryEntityController in the grails-app/controllers/grailsproj directory (the scaffolding plugin is included in the dependencies):
package grailsproj

import persistence.postgresql.mapping.DictionaryEntity

class DictionaryEntityController {
    static scaffold = DictionaryEntity
}
When I run the application there are no domain classes ("Domains: 0" in Artefacts). If I go to my controller ("/dictionaryEntity/index") I get the following error:
ERROR org.grails.web.errors.GrailsExceptionResolver - MissingMethodException occurred when processing request: [GET] /dictionaryEntity/index
No signature of method: static persistence.postgresql.mapping.DictionaryEntity.count() is applicable for argument types: () values: []
Possible solutions: print(java.lang.Object), print(java.io.PrintWriter), wait(), find(), collect(), any(). Stacktrace follows:
java.lang.reflect.InvocationTargetException: null
at org.grails.core.DefaultGrailsControllerClass$ReflectionInvoker.invoke(DefaultGrailsControllerClass.java:210)
at org.grails.core.DefaultGrailsControllerClass.invoke(DefaultGrailsControllerClass.java:187)
at org.grails.web.mapping.mvc.UrlMappingsInfoHandlerAdapter.handle(UrlMappingsInfoHandlerAdapter.groovy:90)
at org.springframework.web.servlet.DispatcherServlet.doDispatch(DispatcherServlet.java:963)
at org.springframework.web.servlet.DispatcherServlet.doService(DispatcherServlet.java:897)
at org.springframework.web.servlet.FrameworkServlet.processRequest(FrameworkServlet.java:970)
at org.springframework.web.servlet.FrameworkServlet.doGet(FrameworkServlet.java:861)
at org.springframework.web.servlet.FrameworkServlet.service(FrameworkServlet.java:846)
at org.springframework.boot.web.filter.ApplicationContextHeaderFilter.doFilterInternal(ApplicationContextHeaderFilter.java:55)
at org.grails.web.servlet.mvc.GrailsWebRequestFilter.doFilterInternal(GrailsWebRequestFilter.java:77)
at org.grails.web.filters.HiddenHttpMethodFilter.doFilterInternal(HiddenHttpMethodFilter.java:67)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
Caused by: groovy.lang.MissingMethodException: No signature of method: static persistence.postgresql.mapping.DictionaryEntity.count() is applicable for argument types: () values: []
Possible solutions: print(java.lang.Object), print(java.io.PrintWriter), wait(), find(), collect(), any()
at grails.rest.RestfulController.countResources(RestfulController.groovy:277)
at grails.rest.RestfulController.index(RestfulController.groovy:64)
at grails.transaction.GrailsTransactionTemplate$2.doInTransaction(GrailsTransactionTemplate.groovy:96)
at org.springframework.transaction.support.TransactionTemplate.execute(TransactionTemplate.java:133)
at grails.transaction.GrailsTransactionTemplate.execute(GrailsTransactionTemplate.groovy:93)
... 14 common frames omitted
It looks like DictionaryEntity isn't being recognized as a domain class.
My pure Java + Hibernate application works fine with the same mapping class, so I need your advice on how to resolve this issue.
Update
Added the @grails.gorm.Entity annotation as suggested in Joshua's answer; the error changed to:
Either class [persistence.postgresql.mapping.DictionaryEntity] is not a domain class or GORM has not been initialized correctly or has already been shutdown. Ensure GORM is loaded and configured correctly before calling any methods on a GORM entity.

GORM 5 does not seem to support JPA mapping via hibernate.cfg.xml. Take a look at this pull request conversation for more details; the main Grails docs seem to be incorrect and need to be updated:
https://github.com/grails/grails-data-mapping/pull/678
Also consider @grails.gorm.Entity if you control the source.
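If you do control the source, a minimal sketch of that suggestion (Groovy, in src/main/groovy) might look like the following. This is untested; whether the JPA column mappings still apply alongside @grails.gorm.Entity is exactly what the pull request above discusses:

@grails.gorm.Entity   // marks the class as a GORM entity
@Entity(name = "persistence.postgresql.mapping.DictionaryEntity")
@Table(name = "dictionary")
public class DictionaryEntity implements Serializable {
    // ... fields, JPA-annotated accessors, equals() and hashCode() as above ...
}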

Related

Getting "unable to serialize DoFnWithExecutionInformation" error while building a TableRow

I am trying to convert a PCollection of Strings into a PCollection of BQ TableRows.
My Apache Beam version is 2.41 with Java 11. I tried multiple ways but could not fix this error.
The TableSchema is loaded from an Avro file and handed to the PCollection as a ValueProvider.
Please help me fix this.
Code:
public static void main(String[] args) {
    PipelineOptions options = PipelineOptionsFactory.create();
    options.setRunner(DirectRunner.class);
    options.setTempLocation("data/temp/");
    Pipeline p = Pipeline.create(options);

    BeamShemaUtil beamShemaUtil = new BeamShemaUtil("data/ship_data_schema.avsc");
    TableSchema tableSchema = beamShemaUtil.convertBQTableSchema();
    ValueProvider<TableSchema> ts = ValueProvider.StaticValueProvider.of(tableSchema);

    PCollection<String> pc1 = p.apply(TextIO.read().from("data/ship_data.csv"));
    PCollection<TableRow> pc2 = pc1.apply(MapElements.via(new ConvertStringToTableRow(ts)));

    PipelineResult result = p.run();
    result.waitUntilFinish();
}
SimpleFunction class:
public static class ConvertStringToTableRow extends SimpleFunction<String, TableRow> {

    ValueProvider<TableSchema> tableSchema;

    public ConvertStringToTableRow(ValueProvider<TableSchema> tableSchema) {
        this.tableSchema = tableSchema;
    }

    public TableRow buildTableRow(TableSchema sc, String[] arr) {
        List<TableFieldSchema> fieldSchemaList = sc.getFields();
        List<String> data = Arrays.stream(arr).collect(Collectors.toList());
        TableRow row = new TableRow();
        TableCell record = new TableCell();
        List<TableCell> tc = new ArrayList<TableCell>();
        for (int i = 0; i < fieldSchemaList.size(); i++) {
            TableFieldSchema sc2 = fieldSchemaList.get(i);
            String fieldName = sc2.getName();
            String fieldType = sc2.getType();
            String fieldValue = data.get(i);
            if (fieldValue.isEmpty()) {
                record.set(fieldName, null);
                tc.add(record);
            } else {
                // break statements added; without them each case falls
                // through into every case below it
                switch (fieldType) {
                    case "STRING":
                        record.set(fieldName, fieldValue);
                        tc.add(record);
                        break;
                    case "BYTES":
                        record.set(fieldName, fieldValue.getBytes());
                        tc.add(record);
                        break;
                    case "INT64":
                    case "INTEGER":
                        record.set(fieldName, Integer.valueOf(fieldValue));
                        tc.add(record);
                        break;
                    case "FLOAT64":
                    case "FLOAT":
                        record.set(fieldName, Float.valueOf(fieldValue));
                        tc.add(record);
                        break;
                    case "BOOL":
                    case "BOOLEAN":
                    case "NUMERIC":
                        record.set(fieldName, Integer.valueOf(fieldValue));
                        tc.add(record);
                        break;
                    case "TIMESTAMP":
                    case "TIME":
                    case "DATE":
                    case "DATETIME":
                    case "STRUCT":
                    case "RECORD":
                    default:
                        // row.set(fieldName,fieldValue);
                        // throw new UnsupportedOperationException("Unsupported BQ Data Type");
                        break;
                }
            }
        }
        return row.setF(tc);
    }

    @Override
    public TableRow apply(String element) {
        String[] arr = element.split(",");
        // BeamShemaUtil beamShemaUtil = new BeamShemaUtil("data/ship_data_schema.avsc");
        // TableSchema tableSchema = beamShemaUtil.convertBQTableSchema();
        TableRow row = buildTableRow(tableSchema.get(), arr);
        return row;
    }
}
Error Messages:
Exception in thread "main" java.lang.IllegalArgumentException: unable to serialize DoFnWithExecutionInformation{doFn=org.apache.beam.sdk.transforms.MapElements$1@270a620, mainOutputTag=Tag<output>, sideInputMapping={}, schemaInformation=DoFnSchemaInformation{elementConverters=[], fieldAccessDescriptor=*}}
at org.apache.beam.sdk.util.SerializableUtils.serializeToByteArray(SerializableUtils.java:59)
at org.apache.beam.repackaged.direct_java.runners.core.construction.ParDoTranslation.translateDoFn(ParDoTranslation.java:737)
at org.apache.beam.repackaged.direct_java.runners.core.construction.ParDoTranslation$1.translateDoFn(ParDoTranslation.java:268)
at org.apache.beam.repackaged.direct_java.runners.core.construction.ParDoTranslation.payloadForParDoLike(ParDoTranslation.java:877)
at org.apache.beam.repackaged.direct_java.runners.core.construction.ParDoTranslation.translateParDo(ParDoTranslation.java:264)
at org.apache.beam.repackaged.direct_java.runners.core.construction.ParDoTranslation.translateParDo(ParDoTranslation.java:225)
at org.apache.beam.repackaged.direct_java.runners.core.construction.ParDoTranslation$ParDoTranslator.translate(ParDoTranslation.java:191)
at org.apache.beam.repackaged.direct_java.runners.core.construction.PTransformTranslation.toProto(PTransformTranslation.java:248)
at org.apache.beam.repackaged.direct_java.runners.core.construction.ParDoTranslation.getParDoPayload(ParDoTranslation.java:788)
at org.apache.beam.repackaged.direct_java.runners.core.construction.ParDoTranslation.isSplittable(ParDoTranslation.java:803)
at org.apache.beam.repackaged.direct_java.runners.core.construction.PTransformMatchers$6.matches(PTransformMatchers.java:274)
at org.apache.beam.sdk.Pipeline$2.visitPrimitiveTransform(Pipeline.java:290)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:593)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit(TransformHierarchy.java:585)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.access$500(TransformHierarchy.java:240)
at org.apache.beam.sdk.runners.TransformHierarchy.visit(TransformHierarchy.java:214)
at org.apache.beam.sdk.Pipeline.traverseTopologically(Pipeline.java:469)
at org.apache.beam.sdk.Pipeline.replace(Pipeline.java:268)
at org.apache.beam.sdk.Pipeline.replaceAll(Pipeline.java:218)
at org.apache.beam.runners.direct.DirectRunner.performRewrites(DirectRunner.java:254)
at org.apache.beam.runners.direct.DirectRunner.run(DirectRunner.java:175)
at org.apache.beam.runners.direct.DirectRunner.run(DirectRunner.java:67)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:323)
at org.apache.beam.sdk.Pipeline.run(Pipeline.java:309)
at BuildWriteBQTableRowExample01.main(BuildWriteBQTableRowExample01.java:50)
Caused by: java.io.NotSerializableException: com.google.api.services.bigquery.model.TableSchema
at java.base/java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1185)
at java.base/java.io.ObjectOutputStream.defaultWriteFields(ObjectOutputStream.java:1553)
at java.base/java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1510)
at java.base/java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1433)
at java.base/java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1179)
at java.base/java.io.ObjectOutputStream.defaultWriteFields(ObjectOutputStream.java:1553)
at java.base/java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1510)
at java.base/java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1433)
at java.base/java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1179)
at java.base/java.io.ObjectOutputStream.writeArray(ObjectOutputStream.java:1379)
at java.base/java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1175)
at java.base/java.io.ObjectOutputStream.defaultWriteFields(ObjectOutputStream.java:1553)
at java.base/java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1510)
at java.base/java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1433)
at java.base/java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1179)
at java.base/java.io.ObjectOutputStream.defaultWriteFields(ObjectOutputStream.java:1553)
at java.base/java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1510)
at java.base/java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1433)
at java.base/java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1179)
at java.base/java.io.ObjectOutputStream.defaultWriteFields(ObjectOutputStream.java:1553)
at java.base/java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1510)
at java.base/java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1433)
at java.base/java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1179)
at java.base/java.io.ObjectOutputStream.defaultWriteFields(ObjectOutputStream.java:1553)
at java.base/java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1510)
at java.base/java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1433)
at java.base/java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1179)
at java.base/java.io.ObjectOutputStream.defaultWriteFields(ObjectOutputStream.java:1553)
at java.base/java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1510)
at java.base/java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1433)
at java.base/java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1179)
at java.base/java.io.ObjectOutputStream.writeObject(ObjectOutputStream.java:349)
at org.apache.beam.sdk.util.SerializableUtils.serializeToByteArray(SerializableUtils.java:55)
... 26 more
Process finished with exit code 1
TableSchema is not Serializable, so the JVM/Runner can't copy the instance wrapped in the StaticValueProvider. This is similar to the issue seen here: Read specific record(s) from Dynamo using Apache Beam DynamoDBIO
Please check https://beam.apache.org/documentation/programming-guide/#user-code-serializability for more information.
In your specific scenario, my recommendation would be creating the TableSchema within the ValueProvider itself instead of relying on serialization.
While I haven't tested with your code, I believe something similar is sufficient:
PCollection<String> pc1 = p.apply(TextIO.read().from("data/ship_data.csv"));
PCollection<TableRow> pc2 = pc1.apply(MapElements.via(
        new ConvertStringToTableRow(
                () -> new BeamShemaUtil("data/ship_data_schema.avsc").convertBQTableSchema()
        )));

PipelineResult result = p.run();
result.waitUntilFinish();
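One wrinkle: ValueProvider declares two abstract methods (get() and isAccessible()), so a bare lambda won't compile against it directly. NestedValueProvider expresses the same idea of deferring schema construction, so that only the path string and a SerializableFunction are ever serialized. A sketch under that assumption, reusing the BeamShemaUtil class from the question:

// Only the path (a String) and the SerializableFunction lambda are serialized;
// the non-serializable TableSchema is built lazily when get() is first called.
ValueProvider<TableSchema> ts = ValueProvider.NestedValueProvider.of(
        ValueProvider.StaticValueProvider.of("data/ship_data_schema.avsc"),
        (String path) -> new BeamShemaUtil(path).convertBQTableSchema());

PCollection<TableRow> pc2 = pc1.apply(MapElements.via(new ConvertStringToTableRow(ts)));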
Here is a solution; it's not perfect, but I hope it can help.
You can use your own structure for the table schema, converting each TableFieldSchema into a custom object that implements Serializable, for example:
public class MyTableSchemaFields implements Serializable {
    private String fieldName;
    private String fieldType;

    // Constructor
    // ...

    // Getters and setters
    // ...
}

public List<MyTableSchemaFields> toMyTableSchemaFields(final List<TableFieldSchema> schemaFields) {
    return schemaFields.stream()
            .map(this::toMyTableSchemaField)
            .collect(Collectors.toList());
}

public MyTableSchemaFields toMyTableSchemaField(final TableFieldSchema schemaField) {
    MyTableSchemaFields field = new MyTableSchemaFields();
    field.setFieldName(schemaField.getName());
    field.setFieldType(schemaField.getType());
    return field;
}
Then, in the rest of your program, use MyTableSchemaFields instead of TableFieldSchema:

public static class ConvertStringToTableRow implements SerializableFunction<String, TableRow> {

    List<MyTableSchemaFields> schemaFields;

    public ConvertStringToTableRow(List<MyTableSchemaFields> schemaFields) {
        this.schemaFields = schemaFields;
    }

    public TableRow buildTableRow(List<MyTableSchemaFields> schemaFields, String[] arr) {
        // ...
    }
}

For the class ConvertStringToTableRow I used a SerializableFunction in my example instead of SimpleFunction.
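To wire it together (a hypothetical sketch assuming the BeamShemaUtil and conversion methods above), build the serializable field list once at pipeline-construction time and hand only that list to the function:

TableSchema tableSchema = new BeamShemaUtil("data/ship_data_schema.avsc").convertBQTableSchema();
List<MyTableSchemaFields> schemaFields = toMyTableSchemaFields(tableSchema.getFields());

PCollection<String> pc1 = p.apply(TextIO.read().from("data/ship_data.csv"));
// MapElements.into(...).via(...) is the overload that accepts a SerializableFunction
PCollection<TableRow> pc2 = pc1.apply(
        MapElements.into(TypeDescriptor.of(TableRow.class))
                .via(new ConvertStringToTableRow(schemaFields)));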

Need to insert rows with ClickHouseIO from Apache Beam (Dataflow)

I am reading from a Pub/Sub topic, which is running fine; now I need to insert into a table on ClickHouse.
I am still learning, so please excuse any mistakes.
PipelineOptions options = PipelineOptionsFactory.create();
//PubSubToDatabasesPipelineOptions options;
Pipeline p = Pipeline.create(options);

PCollection<String> inputFromPubSub = p.apply(namePrefix + "ReadFromPubSub",
        PubsubIO.readStrings().fromSubscription("projects/*********/subscriptions/crypto_bitcoin.dataflow.bigquery.transactions").withIdAttribute(PUBSUB_ID_ATTRIBUTE));

PCollection<TransactionSmall> res = inputFromPubSub.apply(namePrefix + "ReadFromPubSub", ParDo.of(new DoFn<String, TransactionSmall>() {
    @ProcessElement
    public void processElement(ProcessContext c) {
        String item = c.element();
        //System.out.print(item);
        Transaction transaction = JsonUtils.parseJson(item, Transaction.class);
        //System.out.print(transaction);
        c.output(new TransactionSmall(new Date(), transaction.getHash(), 123));
    }
}));

res.apply(ClickHouseIO.<TransactionSmall>write("jdbc:clickhouse://**.**.**.**:8123/litecoin?password=*****", "****"));
p.run().waitUntilFinish();
My TransactionSmall.java:

import java.io.Serializable;
import java.util.Date;

public class TransactionSmall implements Serializable {
    private Date created_dt;
    private String hash;
    private int number;

    public TransactionSmall(Date created_dt, String hash, int number) {
        this.created_dt = created_dt;
        this.hash = hash;
        this.number = number;
    }
}
My table definition:

clickhouse.us-east1-b.c.staging-btc-etl.internal :) CREATE TABLE litecoin.saurabh_blocks_small (`created_date` Date DEFAULT today(), `hash` String, `number` In) ENGINE = MergeTree(created_date, (hash, number), 8192)

CREATE TABLE litecoin.saurabh_blocks_small
(
    `created_date` Date,
    `hash` String,
    `number` In
)
ENGINE = MergeTree(created_date, (hash, number), 8192)
I am getting an error like:
java.lang.IllegalArgumentException: Type of @Element must match the DoFn typesaurabhReadFromPubSub2/ParMultiDo(Anonymous).output [PCollection]
at org.apache.beam.sdk.transforms.ParDo.getDoFnSchemaInformation (ParDo.java:577)
at org.apache.beam.repackaged.direct_java.runners.core.construction.ParDoTranslation.translateParDo (ParDoTranslation.java:185)
at org.apache.beam.repackaged.direct_java.runners.core.construction.ParDoTranslation$ParDoTranslator.translate (ParDoTranslation.java:124)
at org.apache.beam.repackaged.direct_java.runners.core.construction.PTransformTranslation.toProto (PTransformTranslation.java:155)
at org.apache.beam.repackaged.direct_java.runners.core.construction.ParDoTranslation.getParDoPayload (ParDoTranslation.java:650)
at org.apache.beam.repackaged.direct_java.runners.core.construction.ParDoTranslation.isSplittable (ParDoTranslation.java:665)
at org.apache.beam.repackaged.direct_java.runners.core.construction.PTransformMatchers$6.matches (PTransformMatchers.java:269)
at org.apache.beam.sdk.Pipeline$2.visitPrimitiveTransform (Pipeline.java:282)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit (TransformHierarchy.java:665)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit (TransformHierarchy.java:657)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit (TransformHierarchy.java:657)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.visit (TransformHierarchy.java:657)
at org.apache.beam.sdk.runners.TransformHierarchy$Node.access$600 (TransformHierarchy.java:317)
at org.apache.beam.sdk.runners.TransformHierarchy.visit (TransformHierarchy.java:251)
at org.apache.beam.sdk.Pipeline.traverseTopologically (Pipeline.java:460)
at org.apache.beam.sdk.Pipeline.replace (Pipeline.java:260)
at org.apache.beam.sdk.Pipeline.replaceAll (Pipeline.java:210)
at org.apache.beam.runners.direct.DirectRunner.run (DirectRunner.java:170)
at org.apache.beam.runners.direct.DirectRunner.run (DirectRunner.java:67)
at org.apache.beam.sdk.Pipeline.run (Pipeline.java:315)
at org.apache.beam.sdk.Pipeline.run (Pipeline.java:301)
at io.blockchainetl.bitcoin.Trail.main (Trail.java:74)
at sun.reflect.NativeMethodAccessorImpl.invoke0 (Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke (NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke (DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke (Method.java:498)
at org.codehaus.mojo.exec.ExecJavaMojo$1.run (ExecJavaMojo.java:282)
at java.lang.Thread.run (Thread.java:748)
What would be the best and cleanest way to achieve this without explicitly creating objects?
Thanks
This is likely happening because Beam relies on the coder specification for a PCollection when it infers the schema for it. It seems to be having trouble inferring the input schema for your ClickhouseIO transform.
You can compel Beam to use a well-defined schema by specifying a coder that supports schema inference, such as AvroCoder. You'd do:
@DefaultCoder(AvroCoder.class)
public class TransactionSmall implements Serializable {
    private Date created_dt;
    private String hash;
    private int number;

    public TransactionSmall(Date created_dt, String hash, int number) {
        this.created_dt = created_dt;
        this.hash = hash;
        this.number = number;
    }
}
Or you can also set the coder for the PCollection on your pipeline:
PCollection<TransactionSmall> res = inputFromPubSub.apply(namePrefix + "ReadFromPubSub", ParDo.of(new DoFn<String, TransactionSmall>() {
#ProcessElement
public void processElement(ProcessContext c) {
String item = c.element();
Transaction transaction = JsonUtils.parseJson(item, Transaction.class);
c.output(new TransactionSmall(new Date(),transaction.getHash(), 123));
}}))
.setCoder(AvroCoder.of(TransactionSmall.class));
res.apply(ClickHouseIO.<TransactionSmall>write("jdbc:clickhouse://**.**.**.**:8123/litecoin?password=*****", "****"));

Neo4j Spring Data: Unrecognized field "meta"

I'm starting to work with Neo4j and Spring Data.
I can do a get on the database, but cannot do a save.
When I try to use the save method, the error below occurs. It seems to be caused by a field Jackson does not recognize. I looked for solutions but nothing worked.
Can anybody help me?
10:52:49.481 [http-nio-8080-exec-2] INFO o.s.d.n.config.Neo4jConfiguration - Initialising Neo4jSession
10:52:51.617 [http-nio-8080-exec-2] ERROR o.n.o.s.response.RowModelResponse - failed to parse: {"row":[1],"meta":[null]}]}
10:52:51.694 [http-nio-8080-exec-2] INFO o.s.d.n.config.Neo4jConfiguration - Intercepted exception
May 13, 2016 10:52:56 AM org.apache.catalina.core.StandardWrapperValve invoke
SEVERE: Servlet.service() for servlet [DispatcherServlet] in context with path [/ProjetoExemplo] threw exception [Request processing failed; nested exception is java.lang.RuntimeException: com.fasterxml.jackson.databind.exc.UnrecognizedPropertyException: Unrecognized field "meta" (class org.neo4j.ogm.session.result.RowModelResult), not marked as ignorable (one known property: "row"])
at [Source: {"row":[1],"meta":[null]}]}; line: 1, column: 20] (through reference chain: org.neo4j.ogm.session.result.RowModelResult["meta"])] with root cause
com.fasterxml.jackson.databind.exc.UnrecognizedPropertyException: Unrecognized field "meta" (class org.neo4j.ogm.session.result.RowModelResult), not marked as ignorable (one known property: "row"])
at [Source: {"row":[1],"meta":[null]}]}; line: 1, column: 20] (through reference chain: org.neo4j.ogm.session.result.RowModelResult["meta"])
at com.fasterxml.jackson.databind.exc.UnrecognizedPropertyException.from(UnrecognizedPropertyException.java:51)
at com.fasterxml.jackson.databind.DeserializationContext.reportUnknownProperty(DeserializationContext.java:731)
at com.fasterxml.jackson.databind.deser.std.StdDeserializer.handleUnknownProperty(StdDeserializer.java:915)
at com.fasterxml.jackson.databind.deser.BeanDeserializerBase.handleUnknownProperty(BeanDeserializerBase.java:1292)
at com.fasterxml.jackson.databind.deser.BeanDeserializerBase.handleUnknownVanilla(BeanDeserializerBase.java:1270)
at com.fasterxml.jackson.databind.deser.BeanDeserializer.vanillaDeserialize(BeanDeserializer.java:247)
at com.fasterxml.jackson.databind.deser.BeanDeserializer.deserialize(BeanDeserializer.java:118)
at com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:3051)
at com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:2146)
at org.neo4j.ogm.session.response.RowModelResponse.next(RowModelResponse.java:45)
at org.neo4j.ogm.session.response.SessionResponseHandler.updateObjects(SessionResponseHandler.java:93)
at org.neo4j.ogm.session.delegates.SaveDelegate.save(SaveDelegate.java:69)
at org.neo4j.ogm.session.delegates.SaveDelegate.save(SaveDelegate.java:43)
at org.neo4j.ogm.session.Neo4jSession.save(Neo4jSession.java:386)
My class:
@NodeEntity
@JsonIgnoreProperties(ignoreUnknown = true)
public class Teste {

    @JsonProperty("id")
    private Long id;

    public Long getId() {
        return id;
    }

    private String name;

    public Teste() {
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((id == null) ? 0 : id.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        Teste other = (Teste) obj;
        if (id == null) {
            if (other.id != null)
                return false;
        } else if (!id.equals(other.id))
            return false;
        return true;
    }
}
My call:
Teste teste1 = new Teste();
teste1.setName("TESTE");
testeService.save(teste1);
Looks like you're using Neo4j 3? If so, the version of SDN you're using is probably incompatible.
Neo4j 3.0 is compatible with these:
- the current snapshot of SDN 4.2: 4.2.0.BUILD-SNAPSHOT
- SDN 4.1.1: 4.1.1.RELEASE with neo4j-ogm 2.0.2
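With Maven, pinning the second combination would look roughly like this (a sketch; these are the usual coordinates for those releases, so verify them against your build):

<dependency>
    <groupId>org.springframework.data</groupId>
    <artifactId>spring-data-neo4j</artifactId>
    <version>4.1.1.RELEASE</version>
</dependency>
<dependency>
    <groupId>org.neo4j</groupId>
    <artifactId>neo4j-ogm-core</artifactId>
    <version>2.0.2</version>
</dependency>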

How to input a null value into a SpecFlow step definition table

How can I input a null value in SpecFlow through a table?
Let's look at an overly simplistic example:
When a tire is attached to a car
| CarId | TireModel      | FabricationDate | Batch |
| 1     | Nokian Hakka R | 2015-09-1       |       |
The empty cell in the Batch column is interpreted by SpecFlow as text, and as such becomes an empty string. Is there a special syntax to mark that column as null?
You can create your own IValueRetriever and replace the default one with yours:
public class StringValueRetriver : IValueRetriever
{
    public bool CanRetrieve(KeyValuePair<string, string> keyValuePair, Type targetType, Type propertyType)
    {
        return propertyType == typeof(string);
    }

    public object Retrieve(KeyValuePair<string, string> keyValuePair, Type targetType, Type propertyType)
    {
        return string.IsNullOrEmpty(keyValuePair.Value) ? null : keyValuePair.Value;
    }
}
Somewhere in your scenario steps:
[BeforeScenario]
public void BeforeScenario()
{
    Service.Instance.ValueRetrievers.Unregister<TechTalk.SpecFlow.Assist.ValueRetrievers.StringValueRetriever>();
    Service.Instance.ValueRetrievers.Register(new StringValueRetriver());
}
Older syntax:

[BeforeScenario]
public void BeforeScenario()
{
    var defaultStringValueRetriever = Service.Instance.ValueRetrievers.FirstOrDefault(vr => vr is TechTalk.SpecFlow.Assist.ValueRetrievers.StringValueRetriever);
    if (defaultStringValueRetriever != null)
    {
        Service.Instance.UnregisterValueRetriever(defaultStringValueRetriever);
        Service.Instance.RegisterValueRetriever(new StringValueRetriver());
    }
}
From SpecFlow 3 onwards, you can just put the following code in your steps class, and in the feature file write null values like this. When you then use the CreateSet function, the value is deserialized correctly.
Id | Value
1  | <null>
[Binding]
public static class YourStepClass
{
    [BeforeTestRun]
    public static void BeforeTestRun()
    {
        Service.Instance.ValueRetrievers.Register(new NullValueRetriever("<null>"));
    }
}
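A short usage sketch (the step text and MyDto are hypothetical; CreateSet comes from TechTalk.SpecFlow.Assist):

[When(@"the following rows exist")]
public void WhenTheFollowingRowsExist(Table table)
{
    // Cells containing "<null>" come back as null thanks to the NullValueRetriever
    var rows = table.CreateSet<MyDto>();
}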
I don't believe there is special syntax for null, so I think you'll have to handle the conversion yourself. The value retrievers have been revised in the v2 branch, and you might be able to handle this by deregistering the standard string value retriever and registering your own implementation that looks for some special syntax and returns null.
In the current 1.9.* versions, though, I think you'll just have to check for the empty string and return null yourself.
I've just chosen to handle this on a case-by-case basis using a simple extension method.
In the handler I convert the passed-in example value parameter by calling NullIfEmpty().
Example usage:

public void AndICheckTheBatchNumber(string batch)
{
    batch = batch.NullIfEmpty();
    // use batch as null how you intended
}
Extension method:

using System;

namespace Util.Extensions
{
    public static class StringExtensions
    {
        public static string NullIfEmpty(this string str)
        {
            if (string.IsNullOrEmpty(str))
            {
                return null;
            }
            return str;
        }
    }
}
Combining answers, I did the following:
using TechTalk.SpecFlow;
using TechTalk.SpecFlow.Assist;
using TechTalk.SpecFlow.Assist.ValueRetrievers;

namespace Util.Extensions
{
    public class NullValueComparer : IValueComparer
    {
        private readonly string _nullValue;

        public NullValueComparer(string nullValue)
        {
            _nullValue = nullValue;
        }

        public bool CanCompare(object actualValue)
        {
            return actualValue is null || actualValue is string;
        }

        public bool Compare(string expectedValue, object actualValue)
        {
            if (_nullValue == expectedValue)
            {
                return actualValue == null;
            }
            return expectedValue == (string)actualValue;
        }
    }
}
And referenced it like this:
[Binding]
public class MyStepDefinitions
{
    private MyTestDto _testDto;
    private AnotherDtoFromElsewhere _actual;

    [BeforeScenario]
    public void BeforeTestRun()
    {
        Service.Instance.ValueRetrievers.Register(new NullValueRetriever("<null>"));
        Service.Instance.ValueComparers.Register(new NullValueComparer("<null>"));
    }

    [When(@"Some test with table:")]
    public void WhenTestWithTable(Table table)
    {
        _testDto = table.CreateInstance<MyTestDto>();
        var actual = new AnotherDtoFromElsewhere();
        table.CompareToInstance(actual);
    }

    [Then(@"X should match:")]
    public void ThenShouldMatch(Table table)
    {
        table.CompareToInstance(_actual);
    }
}

Batch Cypher queries generated by RestCypherQueryEngine

I am trying to batch together a few Cypher queries with the REST API (using the Java bindings library) so that only one call is made over the wire, but it seems not to respect the batching on the client side and gives this error:
java.lang.RuntimeException: Error reading as JSON ''
at org.neo4j.rest.graphdb.util.JsonHelper.readJson(JsonHelper.java:57)
at org.neo4j.rest.graphdb.util.JsonHelper.jsonToSingleValue(JsonHelper.java:62)
at org.neo4j.rest.graphdb.RequestResult.toEntity(RequestResult.java:114)
at org.neo4j.rest.graphdb.RequestResult.toMap(RequestResult.java:123)
at org.neo4j.rest.graphdb.batch.RecordingRestRequest.toMap(RecordingRestRequest.java:138)
at org.neo4j.rest.graphdb.ExecutingRestAPI.query(ExecutingRestAPI.java:489)
at org.neo4j.rest.graphdb.ExecutingRestAPI.query(ExecutingRestAPI.java:509)
at org.neo4j.rest.graphdb.RestAPIFacade.query(RestAPIFacade.java:233)
at org.neo4j.rest.graphdb.query.RestCypherQueryEngine.query(RestCypherQueryEngine.java:50)
...
Caused by: java.io.EOFException: No content to map to Object due to end of input
at org.codehaus.jackson.map.ObjectMapper._initForReading(ObjectMapper.java:2766)
at org.codehaus.jackson.map.ObjectMapper._readMapAndClose(ObjectMapper.java:2709)
at org.codehaus.jackson.map.ObjectMapper.readValue(ObjectMapper.java:1854)
at org.neo4j.rest.graphdb.util.JsonHelper.readJson(JsonHelper.java:55)
... 41 more
This is how I am trying to batch them:
graphDatabaseService.getRestAPI().executeBatch(new BatchCallback<Void>() {
    @Override
    public Void recordBatch(RestAPI batchRestApi) {
        String query = "CREATE accounts=({userId:{userId}})-[r:OWNS]->({facebookId:{facebookId}})";
        graphDatabaseService.getQueryEngine().query(query, map("userId", 1, "facebookId", "1"));
        graphDatabaseService.getQueryEngine().query(query, map("userId", 2, "facebookId", "2"));
        graphDatabaseService.getQueryEngine().query(query, map("userId", 3, "facebookId", "3"));
        return null;
    }
});
I am using neo4j version 1.9 and the corresponding client library. Should this be possible?
Here is JUnit sample code that works for your batch. No Cypher string template is used here; instead, native methods on the RestAPI object are called:
public static final DynamicRelationshipType OWNS = DynamicRelationshipType.withName("OWNS");

@Autowired
private SpringRestGraphDatabase graphDatabaseService;

@Test
public void batchTest()
{
    Assert.assertNotNull(this.graphDatabaseService);
    this.graphDatabaseService.getRestAPI().executeBatch(new BatchCallback<Void>()
    {
        @Override
        public Void recordBatch(RestAPI batchRestApi)
        {
            for (int counter = 1; counter <= 3; counter++)
            {
                RestNode userId = batchRestApi.createNode(map("userId", Integer.valueOf(counter)));
                RestNode facebookId = batchRestApi.createNode(map("facebookId", Integer.valueOf(counter).toString()));
                batchRestApi.createRelationship(userId, facebookId, OWNS, map());
            }
            return null;
        }
    });
}
