From 9ec97981276003bf0286b32b6f2336550dafc7cd Mon Sep 17 00:00:00 2001 From: Juliano Alves Date: Wed, 18 Oct 2023 19:07:34 +0100 Subject: [PATCH] adding scalafmt to protoquill --- .gitignore | 1 - .scalafmt.conf | 25 + build.sbt | 3 + .../io/getquill/CalibanIntegration.scala | 18 +- .../CalibanIntegrationNestedSpec.scala | 42 +- .../io/getquill/CalibanIntegrationSpec.scala | 27 +- .../test/scala/io/getquill/CalibanSpec.scala | 14 +- .../src/test/scala/io/getquill/Schema.scala | 7 +- .../io/getquill/example/CalibanExample.scala | 54 +- .../example/CalibanExampleNested.scala | 56 +- .../io/getquill/CassandraZioContext.scala | 171 +++--- .../io/getquill/CassandraZioSession.scala | 17 +- .../io/getquill/cassandrazio/Quill.scala | 53 +- .../cassandra/zio/DecodeNullSpec.scala | 31 +- .../context/cassandra/zio/EncodingSpec.scala | 13 +- .../zio/StreamingWithFetchSpec.scala | 2 +- .../zio/UdtEncodingSessionContextSpec.scala | 20 +- .../cassandra/zio/ZioCassandraSpec.scala | 7 +- .../cassandra/zio/examples/IdiomaticApp.scala | 9 +- .../zio/examples/IdiomaticAppData.scala | 7 +- .../zio/examples/other/ExampleApp.scala | 6 +- .../other/ExampleAppImplicitEnv.scala | 8 +- .../zio/examples/other/PlainApp.scala | 3 +- .../context/cassandra/zio/package.scala | 3 +- .../io/getquill/CassandraAsyncContext.scala | 56 +- .../io/getquill/CassandraContextConfig.scala | 7 +- .../getquill/CassandraCqlSessionContext.scala | 18 +- .../io/getquill/CassandraMirrorContext.scala | 46 +- .../io/getquill/CassandraSyncContext.scala | 46 +- .../src/main/scala/io/getquill/Ops.scala | 35 +- .../main/scala/io/getquill/UdtMetaDsl.scala | 19 +- .../scala/io/getquill/context/Caches.scala | 4 +- .../getquill/context/CassandraSession.scala | 25 +- .../context/cassandra/CassandraContext.scala | 44 +- .../cassandra/CassandraSessionContext.scala | 76 +-- .../cassandra/PrepareStatementCache.scala | 6 +- .../context/cassandra/UdtMetaDslMacro.scala | 29 +- .../cassandra/cluster/SessionBuilder.scala | 24 +- .../cassandra/encoding/CassandraMapper.scala | 6 +- .../encoding/CassandraMapperConversions.scala | 26 +- .../cassandra/encoding/CassandraType.scala | 2 +- .../cassandra/encoding/CassandraTypes.scala | 78 ++- .../encoding/CollectionDecoders.scala | 26 +- .../encoding/CollectionEncoders.scala | 35 +- .../context/cassandra/encoding/Decoders.scala | 52 +- .../context/cassandra/encoding/Encoders.scala | 32 +- .../cassandra/encoding/Encodings.scala | 5 +- .../cassandra/encoding/MirrorFields.scala | 22 +- .../cassandra/encoding/UdtEncoding.scala | 35 +- .../cassandra/encoding/UdtEncodingMacro.scala | 542 ++++++++++-------- .../context/cassandra/encoding/UdtOps.scala | 18 +- .../cassandra/util/FutureConversions.scala | 22 +- .../context/cassandra/util/UdtMetaUtils.scala | 7 +- .../CassandraContextConfigSpec.scala | 2 +- .../cassandra/CassandraContextSpec.scala | 11 +- .../context/cassandra/CollectionsSpec.scala | 2 +- .../context/cassandra/CqlIdiomSpec.scala | 14 +- .../context/cassandra/CqlQuerySpec.scala | 32 +- .../context/cassandra/DecodeNullSpec.scala | 4 +- .../context/cassandra/EncodingSpec.scala | 27 +- .../cassandra/EncodingSpecHelper.scala | 46 +- .../context/cassandra/ListsEncodingSpec.scala | 49 +- .../context/cassandra/MapsEncodingSpec.scala | 30 +- .../cassandra/PeopleCassandraSpec.scala | 4 +- .../QueryResultTypeCassandraSpec.scala | 14 +- .../QueryResultTypeCassandraSyncSpec.scala | 1 - .../context/cassandra/SetsEncodingSpec.scala | 47 +- .../cassandra/ops/CassandraOpsSpec.scala | 2 +- 
.../getquill/context/cassandra/package.scala | 2 +- .../udt/UdtEncodingMirrorContextSpec.scala | 8 +- .../udt/UdtEncodingSessionContextSpec.scala | 33 +- .../context/cassandra/udt/UdtSpec.scala | 12 +- .../io/getquill/doobie/DoobieContext.scala | 36 +- .../getquill/doobie/DoobieContextBase.scala | 104 ++-- .../doobie/PeopleDoobieReturningSpec.scala | 23 +- .../doobie/PostgresDoobieContextSuite.scala | 9 +- .../io/getquill/doobie/issue/Issue1067.scala | 4 +- .../scala/io/getquill/ZioJdbcContexts.scala | 66 ++- .../scala/io/getquill/context/ZioJdbc.scala | 58 +- .../context/json/PostgresJsonExtensions.scala | 72 ++- .../context/qzio/ResultSetIterator.scala | 17 +- .../context/qzio/ZioJdbcContext.scala | 257 +++++---- .../qzio/ZioJdbcUnderlyingContext.scala | 248 +++++--- .../context/qzio/ZioPrepareContext.scala | 64 ++- .../scala/io/getquill/jdbczio/Quill.scala | 53 +- .../getquill/jdbczio/QuillBaseContext.scala | 164 ++++-- .../scala/io/getquill/PeopleZioSpec.scala | 4 +- .../io/getquill/PrepareZioJdbcSpecBase.scala | 41 +- .../io/getquill/ResultSetIteratorSpec.scala | 39 +- .../src/test/scala/io/getquill/ZioSpec.scala | 8 +- .../io/getquill/examples/IdiomaticApp.scala | 9 +- .../getquill/examples/IdiomaticAppData.scala | 12 +- .../getquill/examples/IdiomaticAppPlain.scala | 18 +- .../examples/other/DataServiceLive.scala | 14 +- .../io/getquill/examples/other/PlainApp.scala | 7 +- .../examples/other/PlainAppDataSource.scala | 5 +- .../examples/other/PlainAppDataSource2.scala | 11 +- .../examples/other/ServiceExample.scala | 13 +- .../io/getquill/examples/other/ZioApp.scala | 3 +- .../examples/other/ZioAppDataSource.scala | 5 +- .../examples/other/ZioAppImplicitEnv.scala | 12 +- .../examples/other/ZioAppManual.scala | 5 +- .../io/getquill/h2/PeopleZioJdbcSpec.scala | 8 +- .../getquill/h2/PeopleZioReturningSpec.scala | 13 +- .../io/getquill/h2/PrepareJdbcSpec.scala | 4 +- .../io/getquill/h2/ProductJdbcSpec.scala | 12 +- .../io/getquill/h2/ZioJdbcContextSpec.scala | 51 +- .../StreamResultsOrBlowUpSpec.scala | 39 +- .../misc/ImplicitEnvPatternSpec.scala | 12 +- .../io/getquill/misc/OnDataSourceSpec.scala | 28 +- .../io/getquill/misc/PrepareJdbcSpec.scala | 4 +- .../misc/StreamingWithFetchSpec.scala | 2 +- .../misc/ZioJdbcUnderlyingContextSpec.scala | 54 +- .../io/getquill/mysql/PeopleZioJdbcSpec.scala | 8 +- .../mysql/PeopleZioReturningSpec.scala | 13 +- .../io/getquill/mysql/PrepareJdbcSpec.scala | 4 +- .../io/getquill/mysql/ProductJdbcSpec.scala | 12 +- .../getquill/mysql/ZioJdbcContextSpec.scala | 61 +- .../getquill/oracle/PeopleZioJdbcSpec.scala | 8 +- .../oracle/PeopleZioReturningSpec.scala | 19 +- .../io/getquill/oracle/PrepareJdbcSpec.scala | 2 +- .../io/getquill/oracle/ProductJdbcSpec.scala | 12 +- .../getquill/oracle/ZioJdbcContextSpec.scala | 61 +- .../postgres/ConnectionLeakTest.scala | 43 +- .../postgres/MultiLevelServiceSpec.scala | 82 +-- .../getquill/postgres/PeopleZioJdbcSpec.scala | 8 +- .../postgres/PeopleZioReturningSpec.scala | 18 +- .../getquill/postgres/PostgresJsonSpec.scala | 16 +- .../getquill/postgres/ProductJdbcSpec.scala | 14 +- .../postgres/ZioJdbcContextSpec.scala | 61 +- .../getquill/sqlite/PeopleZioJdbcSpec.scala | 8 +- .../sqlite/PeopleZioReturningSpec.scala | 13 +- .../io/getquill/sqlite/PrepareJdbcSpec.scala | 4 +- .../io/getquill/sqlite/ProductJdbcSpec.scala | 12 +- .../getquill/sqlite/ZioJdbcContextSpec.scala | 61 +- .../sqlserver/PeopleZioJdbcSpec.scala | 8 +- .../sqlserver/PeopleZioReturningSpec.scala | 19 +- .../getquill/sqlserver/PrepareJdbcSpec.scala | 
6 +- .../getquill/sqlserver/ProductJdbcSpec.scala | 12 +- .../sqlserver/ZioJdbcContextSpec.scala | 61 +- .../scala/io/getquill/H2JdbcContext.scala | 6 +- .../scala/io/getquill/MysqlJdbcContext.scala | 6 +- .../scala/io/getquill/OracleJdbcContext.scala | 6 +- .../io/getquill/PostgresJdbcContext.scala | 6 +- .../io/getquill/SqlServerJdbcContext.scala | 6 +- .../scala/io/getquill/SqliteJdbcContext.scala | 6 +- .../getquill/context/jdbc/ArrayDecoders.scala | 99 ++-- .../getquill/context/jdbc/ArrayEncoders.scala | 96 ++-- .../getquill/context/jdbc/BaseContexts.scala | 28 +- .../context/jdbc/BooleanIntEncoding.scala | 3 +- .../io/getquill/context/jdbc/Decoders.scala | 93 ++- .../io/getquill/context/jdbc/Encoders.scala | 121 ++-- .../getquill/context/jdbc/JdbcContext.scala | 62 +- .../context/jdbc/JdbcContextBase.scala | 18 +- .../context/jdbc/JdbcContextTypes.scala | 43 +- .../context/jdbc/JdbcContextVerbExecute.scala | 97 ++-- .../context/jdbc/JdbcContextVerbPrepare.scala | 58 +- .../context/jdbc/ResultSetExtractor.scala | 13 +- .../context/jdbc/SimplifiedContexts.scala | 175 +++--- .../context/jdbc/UUIDObjectEncoding.scala | 6 +- .../context/jdbc/UUIDStringEncoding.scala | 3 +- .../context/jdbc/JdbcContextConfigSpec.scala | 2 +- .../context/jdbc/PrepareJdbcSpecBase.scala | 15 +- .../context/jdbc/h2/BatchValuesJdbcSpec.scala | 4 +- .../jdbc/h2/CaseClassQueryJdbcSpec.scala | 40 +- .../context/jdbc/h2/DistinctJdbcSpec.scala | 28 +- .../context/jdbc/h2/JdbcEncodingSpec.scala | 2 +- .../context/jdbc/h2/OptionJdbcSpec.scala | 4 +- .../context/jdbc/h2/PeopleJdbcSpec.scala | 2 +- .../context/jdbc/h2/ProductJdbcSpec.scala | 12 +- .../io/getquill/context/jdbc/h2/package.scala | 2 +- .../jdbc/mysql/BatchValuesJdbcSpec.scala | 2 +- .../jdbc/mysql/CaseClassQueryJdbcSpec.scala | 40 +- .../context/jdbc/mysql/DistinctJdbcSpec.scala | 28 +- .../context/jdbc/mysql/JdbcEncodingSpec.scala | 7 +- .../context/jdbc/mysql/OptionJdbcSpec.scala | 4 +- .../context/jdbc/mysql/ProductJdbcSpec.scala | 10 +- .../jdbc/mysql/QueryResultTypeJdbcSpec.scala | 4 +- .../getquill/context/jdbc/mysql/package.scala | 8 +- .../jdbc/oracle/BatchValuesJdbcSpec.scala | 6 +- .../jdbc/oracle/CaseClassQueryJdbcSpec.scala | 40 +- .../jdbc/oracle/DepartmentsJdbcSpec.scala | 8 +- .../jdbc/oracle/DistinctJdbcSpec.scala | 28 +- .../jdbc/oracle/JdbcEncodingSpec.scala | 77 ++- .../context/jdbc/oracle/ProductJdbcSpec.scala | 10 +- .../jdbc/oracle/QueryResultTypeJdbcSpec.scala | 6 +- .../context/jdbc/oracle/ScalarValueSpec.scala | 2 +- .../getquill/context/jdbc/oracle/oracle.scala | 8 +- .../jdbc/postgres/ArrayJdbcEncodingSpec.scala | 2 +- .../jdbc/postgres/BatchValuesJdbcSpec.scala | 2 +- .../postgres/CaseClassQueryJdbcSpec.scala | 40 +- .../jdbc/postgres/DistinctJdbcSpec.scala | 28 +- .../jdbc/postgres/FlicerCombinationSpec.scala | 27 +- .../jdbc/postgres/FlicerMapTypesSpec.scala | 22 +- .../postgres/FlicerVariableColumnsSpec.scala | 38 +- .../jdbc/postgres/JdbcArrayOpsSpec.scala | 2 +- .../jdbc/postgres/JdbcContextSpec.scala | 50 +- .../jdbc/postgres/JdbcEncodingSpec.scala | 13 +- .../jdbc/postgres/OnConflictJdbcSpec.scala | 4 +- .../jdbc/postgres/OptionJdbcSpec.scala | 4 +- .../jdbc/postgres/ProductJdbcSpec.scala | 10 +- .../context/jdbc/postgres/package.scala | 8 +- .../jdbc/sqlite/BatchValuesJdbcSpec.scala | 6 +- .../jdbc/sqlite/CaseClassQueryJdbcSpec.scala | 20 +- .../jdbc/sqlite/DistinctJdbcSpec.scala | 28 +- .../jdbc/sqlite/JdbcEncodingSpec.scala | 2 +- .../context/jdbc/sqlite/OptionJdbcSpec.scala | 4 +- 
.../context/jdbc/sqlite/ProductJdbcSpec.scala | 10 +- .../context/jdbc/sqlite/package.scala | 10 +- .../jdbc/sqlserver/BatchValuesJdbcSpec.scala | 3 +- .../jdbc/sqlserver/DepartmentsJdbcSpec.scala | 2 +- .../jdbc/sqlserver/DistinctJdbcSpec.scala | 28 +- .../jdbc/sqlserver/JdbcContextSpec.scala | 15 +- .../jdbc/sqlserver/JdbcEncodingSpec.scala | 9 +- .../jdbc/sqlserver/OptionJdbcSpec.scala | 12 +- .../jdbc/sqlserver/ProductJdbcSpec.scala | 10 +- .../context/jdbc/sqlserver/package.scala | 10 +- .../scala/io/getquill/DynamicQuerySpec.scala | 2 +- .../io/getquill/InsertAdvancedSpec.scala | 173 +++--- .../test/scala/io/getquill/TestContexts.scala | 2 +- .../getquill/TestMirrorContextTemplate.scala | 11 +- .../customparser/CustomParseTest.scala | 6 +- .../examples/MiniExample_LiftByKeys.scala | 16 +- .../examples/TypeclassExample_Show.scala | 10 +- .../examples/TypeclassUsecase_Typeclass.scala | 45 +- .../getquill/examples/TypelevelUsecase.scala | 35 +- .../io/getquill/ported/AggregationSpec.scala | 99 +++- .../io/getquill/ported/ContextMacroSpec.scala | 36 +- .../io/getquill/ported/package-info.scala | 7 +- .../src/main/scala/io/getquill/Dsl.scala | 22 +- .../src/main/scala/io/getquill/DslModel.scala | 54 +- .../main/scala/io/getquill/DynamicDsl.scala | 47 +- .../scala/io/getquill/DynamicDslModel.scala | 116 ++-- .../scala/io/getquill/MirrorContext.scala | 133 +++-- .../main/scala/io/getquill/OuterSelect.scala | 9 +- .../scala/io/getquill/SqlMirrorContext.scala | 5 +- .../main/scala/io/getquill/StaticSplice.scala | 37 +- .../io/getquill/UpperCaseNonDefault.scala | 8 +- .../scala/io/getquill/context/Context.scala | 75 ++- .../io/getquill/context/ContextEffect.scala | 9 +- .../io/getquill/context/ContextHelp.scala | 20 +- .../getquill/context/ContextVerbPrepare.scala | 28 +- .../context/ContextVerbPrepareLambda.scala | 9 +- .../getquill/context/ContextVerbStream.scala | 16 +- .../context/ContextVerbTranslate.scala | 79 ++- .../DatasourceContextInjectionMacro.scala | 4 +- .../getquill/context/InsertUpdateMacro.scala | 262 +++++---- .../context/InsertUpdateMetaMacro.scala | 10 +- .../scala/io/getquill/context/LiftMacro.scala | 133 +++-- .../io/getquill/context/LiftsExtractor.scala | 46 +- .../io/getquill/context/Particularize.scala | 172 +++--- .../context/ProtoContextSecundus.scala | 79 ++- .../io/getquill/context/QueryExecution.scala | 434 ++++++++------ .../context/QueryExecutionBatch.scala | 265 ++++++--- .../context/QueryExecutionBatchDynamic.scala | 112 ++-- .../QueryExecutionBatchIteration.scala | 106 ++-- .../getquill/context/QueryMetaExtractor.scala | 89 +-- .../io/getquill/context/QueryMetaMacro.scala | 4 +- .../io/getquill/context/QuoteMacro.scala | 51 +- .../context/ReflectiveChainLookup.scala | 28 +- .../io/getquill/context/SchemaMetaMacro.scala | 5 +- .../getquill/context/SplicingBehavior.scala | 6 +- .../getquill/context/StaticSpliceMacro.scala | 33 +- .../io/getquill/context/StaticState.scala | 23 +- .../context/StaticTranslationMacro.scala | 125 ++-- .../getquill/context/SummonDecoderMacro.scala | 6 +- .../io/getquill/context/Unparticular.scala | 37 +- .../context/VerifyFreeVariables.scala | 26 +- .../context/mirror/ArrayMirrorEncoding.scala | 43 +- .../context/mirror/MirrorDecoders.scala | 33 +- .../context/mirror/MirrorEncoders.scala | 35 +- .../io/getquill/context/mirror/Row.scala | 6 +- .../io/getquill/context/sql/SqlContext.scala | 4 +- .../generic/AnyValEncodingMacro.scala | 50 +- .../io/getquill/generic/ArrayEncoding.scala | 26 +- .../io/getquill/generic/ConstructType.scala 
| 8 +- .../DeconstructElaboratedEntityLevels.scala | 137 +++-- .../getquill/generic/ElaborateStructure.scala | 281 +++++---- .../io/getquill/generic/EncodingDsl.scala | 53 +- .../io/getquill/generic/GenericDecoder.scala | 171 ++++-- .../io/getquill/generic/GenericEncoder.scala | 7 +- .../io/getquill/generic/TupleMember.scala | 16 +- .../scala/io/getquill/generic/WarnMac.scala | 3 +- .../scala/io/getquill/idiom/LoadNaming.scala | 35 +- .../io/getquill/metaprog/ExprAccumulate.scala | 12 +- .../io/getquill/metaprog/ExprModel.scala | 151 +++-- .../io/getquill/metaprog/Extractors.scala | 155 ++--- .../io/getquill/metaprog/SummonParser.scala | 10 +- .../metaprog/SummonTranspileConfig.scala | 24 +- .../getquill/metaprog/etc/ColumnsFlicer.scala | 45 +- .../io/getquill/metaprog/etc/MapFlicer.scala | 26 +- .../io/getquill/parser/BooAstSerializer.scala | 41 +- .../scala/io/getquill/parser/Lifter.scala | 205 ++++--- .../scala/io/getquill/parser/Lifters.scala | 25 +- .../scala/io/getquill/parser/Parser.scala | 452 +++++++++------ .../io/getquill/parser/ParserHelpers.scala | 195 ++++--- .../io/getquill/parser/SerialHelper.scala | 6 +- .../parser/SerializationBehavior.scala | 6 +- .../scala/io/getquill/parser/Serialize.scala | 14 +- .../scala/io/getquill/parser/Unlifter.scala | 198 ++++--- .../io/getquill/parser/engine/History.scala | 4 +- .../io/getquill/parser/engine/Parser.scala | 14 +- .../getquill/parser/engine/ParserChain.scala | 21 +- .../io/getquill/parser/engine/failParse.scala | 14 +- .../scala/io/getquill/quat/QuatMaking.scala | 46 +- .../main/scala/io/getquill/util/Format.scala | 9 +- .../io/getquill/util/Interpolator2.scala | 40 +- .../scala/io/getquill/util/LoadObject.scala | 25 +- .../io/getquill/util/LoadObjectTest.scala | 3 +- .../io/getquill/util/ProtoMessages.scala | 14 +- .../scala/io/getquill/util/SummonMac.scala | 5 +- .../io/getquill/util/debug/PrintMac.scala | 22 +- .../io/getquill/util/prep/Hierarchies.scala | 14 +- .../io/getquill/BatchActionMultiTest.scala | 81 ++- .../scala/io/getquill/BatchActionTest.scala | 246 +++++--- .../test/scala/io/getquill/FlicersSpec.scala | 46 +- .../GenericDecoderCoproductTest.scala | 4 +- ...enericDecoderCoproductTestAdditional.scala | 17 +- .../io/getquill/GenericDecoderTest.scala | 19 +- .../scala/io/getquill/InsertLiftedSpec.scala | 10 +- .../io/getquill/MappedEncodingSpec.scala | 6 +- .../OptionalProductEncodingSpec.scala | 2 +- .../io/getquill/ParticularizationSpec.scala | 48 +- .../scala/io/getquill/QueryMetaTest.scala | 44 +- .../scala/io/getquill/QuerySchemaTest.scala | 20 +- .../scala/io/getquill/QuotationTest.scala | 218 ++++--- .../src/test/scala/io/getquill/Spec.scala | 18 +- .../io/getquill/StructuralTypeSpec.scala | 8 +- .../test/scala/io/getquill/TestEntities.scala | 13 +- .../getquill/anyval/AnyValEncodingSpec.scala | 2 +- .../scala/io/getquill/ast/ActionAstSpec.scala | 4 +- .../context/encoding/OptionalNestedSpec.scala | 21 +- .../getquill/context/sql/ArrayOpsSpec.scala | 8 +- .../sql/BatchUpdateValuesMirrorSpec.scala | 15 +- .../context/sql/BatchValuesSpec.scala | 38 +- .../context/sql/DepartmentsSpec.scala | 61 +- .../getquill/context/sql/DistinctSpec.scala | 15 +- .../getquill/context/sql/EncodingSpec.scala | 177 +++--- .../context/sql/OptionQuerySpec.scala | 27 +- .../context/sql/PeopleAggregationSpec.scala | 20 +- .../context/sql/PeopleReturningSpec.scala | 38 +- .../io/getquill/context/sql/PeopleSpec.scala | 102 ++-- .../io/getquill/context/sql/ProductSpec.scala | 12 +- .../context/sql/QueryResultTypeSpec.scala | 32 +- 
.../context/sql/Scala3FeaturesSpec.scala | 4 +- .../context/sql/TestContextTemplate.scala | 4 +- .../getquill/context/sql/TestDecoders.scala | 3 +- .../getquill/context/sql/TestEncoders.scala | 3 +- .../sql/base/BatchUpdateValuesSpec.scala | 57 +- .../sql/encoding/ArrayEncodingBaseSpec.scala | 24 +- .../sql/encoding/ArrayEncodingSpec.scala | 4 +- .../getquill/customparser/CustomParser.scala | 17 +- .../io/getquill/idiom/LoadNamingTest.scala | 6 +- .../getquill/metaprog/StaticSpliceSpec.scala | 20 +- .../parser/BooAstSerializerSpec.scala | 31 +- .../scala/io/getquill/quat/QuatSpec.scala | 138 +++-- .../sanity/SimpleBatchWithInfix.scala | 4 +- .../sanity/SimpleMapRunSanityTest.scala | 6 +- .../getquill/sanity/SimpleMapSanityTest.scala | 6 +- .../sanity/SimpleMapSqlSanityTest.scala | 6 +- .../getquill/sanity/SimplePrepareSpec.scala | 10 +- .../context/qzio/ImplicitSyntax.scala | 21 +- .../io/getquill/context/qzio/ZioContext.scala | 24 +- .../context/qzio/ZioTranslateContext.scala | 10 +- 364 files changed, 8367 insertions(+), 5575 deletions(-) create mode 100644 .scalafmt.conf diff --git a/.gitignore b/.gitignore index 107918128..360ea1818 100644 --- a/.gitignore +++ b/.gitignore @@ -39,7 +39,6 @@ project/.bloop/ .jvmopts .DS_Store .vscode -.scalafmt.conf project/metals.sbt project/project/ .sbtopts diff --git a/.scalafmt.conf b/.scalafmt.conf new file mode 100644 index 000000000..f28c062a8 --- /dev/null +++ b/.scalafmt.conf @@ -0,0 +1,25 @@ +version = "3.7.14" +maxColumn = 120 +align.preset = most +align.multiline = false +continuationIndent.defnSite = 2 +assumeStandardLibraryStripMargin = true +docstrings.style = Asterisk +docstrings.wrapMaxColumn = 80 +lineEndings = preserve +includeCurlyBraceInSelectChains = false +danglingParentheses.preset = true +optIn.annotationNewlines = true +newlines.alwaysBeforeMultilineDef = false +runner.dialect = scala3 +runner.dialectOverride.allowSignificantIndentation = false +rewrite.rules = [RedundantBraces] + +project.excludePaths = ["glob:**/scalafix/input/**", "glob:**/scalafix/output/**"] + +rewrite.redundantBraces.generalExpressions = false +rewriteTokens = { + "⇒": "=>" + "→": "->" + "←": "<-" +} \ No newline at end of file diff --git a/build.sbt b/build.sbt index 06e421405..cf07b3011 100644 --- a/build.sbt +++ b/build.sbt @@ -14,6 +14,9 @@ inThisBuild( ScmInfo(url("https://github.com/zio/zio-protoquill"), "git:git@github.com:zio/zio-protoquill.git") ), versionScheme := Some("always"), + scalafmtCheck := true, + scalafmtSbtCheck := true, + scalafmtOnCompile := !insideCI.value, ) ) diff --git a/quill-caliban/src/main/scala/io/getquill/CalibanIntegration.scala b/quill-caliban/src/main/scala/io/getquill/CalibanIntegration.scala index 67587e2ac..acf57d4ae 100644 --- a/quill-caliban/src/main/scala/io/getquill/CalibanIntegration.scala +++ b/quill-caliban/src/main/scala/io/getquill/CalibanIntegration.scala @@ -2,9 +2,9 @@ package io.getquill import caliban.CalibanError.ExecutionError import caliban.GraphQL.graphQL -import caliban.introspection.adt.{ __InputValue, __Type, __TypeKind } -import caliban.schema.{ ArgBuilder, Schema, Step } -import caliban.{ InputValue, RootResolver } +import caliban.introspection.adt.{__InputValue, __Type, __TypeKind} +import caliban.schema.{ArgBuilder, Schema, Step} +import caliban.{InputValue, RootResolver} import caliban.execution.Field import caliban.schema.Types import caliban.Value @@ -16,7 +16,7 @@ object CalibanIntegration { def quillColumns(field: Field) = { def recurseFetchFields(field: Field): List[Field] = if 
(Types.innerType(field.fieldType).kind == __TypeKind.OBJECT) - field.fields.flatMap(recurseFetchFields(_)) + field.fields.flatMap(recurseFetchFields(_)) else List(field) field.fields.flatMap(recurseFetchFields(_)).map(_.name) @@ -28,21 +28,21 @@ object CalibanIntegration { // e.g. for `name` in Person(name: Name, age: Int) this would be Name from which we need first:String, last:String case InputValue.ObjectValue(fields) => fields.toList.flatMap { case (k, v) => flattenToPairs(k, v) } // Need to look at StringValue directly because calling .toInputString on it will give double quoting i.e. "\"value\"" - case Value.StringValue(value) => List((key, value)) - case _ => List((key, value.toInputString)) + case Value.StringValue(value) => List((key, value)) + case _ => List((key, value.toInputString)) } implicit def productArgBuilder[T]: ArgBuilder[ProductArgs[T]] = { case InputValue.ObjectValue(fields) => Right(ProductArgs[T](fields.flatMap { case (k, v) => flattenToPairs(k, v).toMap })) - case other => Left(ExecutionError(s"Can't build a ProductArgs from input $other")) + case other => Left(ExecutionError(s"Can't build a ProductArgs from input $other")) } implicit def productSchema[T](implicit ev: Schema[Any, T]): Schema[Any, ProductArgs[T]] = new Schema[Any, ProductArgs[T]] { def makeOptionalRecurse(f: __InputValue): __InputValue = { - val fieldType = f.`type`() + val fieldType = f.`type`() val optionalFieldType = fieldType.kind match { case __TypeKind.NON_NULL => fieldType.ofType.getOrElse(fieldType) case _ => fieldType @@ -64,4 +64,4 @@ object CalibanIntegration { def resolve(value: ProductArgs[T]): Step[Any] = Step.NullStep } -} // end CalibanIntegration \ No newline at end of file +} // end CalibanIntegration diff --git a/quill-caliban/src/test/scala/io/getquill/CalibanIntegrationNestedSpec.scala b/quill-caliban/src/test/scala/io/getquill/CalibanIntegrationNestedSpec.scala index a26b04457..417d38b34 100644 --- a/quill-caliban/src/test/scala/io/getquill/CalibanIntegrationNestedSpec.scala +++ b/quill-caliban/src/test/scala/io/getquill/CalibanIntegrationNestedSpec.scala @@ -17,36 +17,38 @@ class CalibanIntegrationNestedSpec extends CalibanSpec { object Dao { def personAddress(columns: List[String], filters: Map[String, String]) = Ctx.run { - query[PersonT].leftJoin(query[AddressT]).on((p, a) => p.id == a.ownerId) + query[PersonT] + .leftJoin(query[AddressT]) + .on((p, a) => p.id == a.ownerId) .map((p, a) => PersonAddressNested(p.id, p.name, p.age, a.map(_.street))) .filterByKeys(filters) .filterColumns(columns) .take(10) - }.provideLayer(zioDS).tap(list => { - println(s"Results: $list for columns: $columns and filters: ${io.getquill.util.Messages.qprint(filters)}") - ZIO.unit - }) - .tapError(e => { - println(s"ERROR $e") - ZIO.unit - }) + }.provideLayer(zioDS) + .tap { list => + println(s"Results: $list for columns: $columns and filters: ${io.getquill.util.Messages.qprint(filters)}") + ZIO.unit + } + .tapError { e => + println(s"ERROR $e") + ZIO.unit + } } } case class Queries( - personAddressNested: Field => (ProductArgs[NestedSchema.PersonAddressNested] => Task[List[NestedSchema.PersonAddressNested]]) + personAddressNested: Field => ( + ProductArgs[NestedSchema.PersonAddressNested] => Task[List[NestedSchema.PersonAddressNested]] + ) ) val api = graphQL( - RootResolver( - Queries( - personAddressNested => - (productArgs => - Nested.Dao.personAddress(quillColumns(personAddressNested), productArgs.keyValues) - ), - ) + RootResolver( + Queries(personAddressNested => + (productArgs => 
Nested.Dao.personAddress(quillColumns(personAddressNested), productArgs.keyValues)) ) ) + ) "Caliban integration should work for nested object" - { "with no top-level filter" in { @@ -115,7 +117,9 @@ class CalibanIntegrationNestedSpec extends CalibanSpec { } } }""" - unsafeRunQuery(query) mustEqual """{"personAddressNested":[{"id":1,"age":44,"name":{"first":"One","last":"A"}}]}""" + unsafeRunQuery( + query + ) mustEqual """{"personAddressNested":[{"id":1,"age":44,"name":{"first":"One","last":"A"}}]}""" } } -} \ No newline at end of file +} diff --git a/quill-caliban/src/test/scala/io/getquill/CalibanIntegrationSpec.scala b/quill-caliban/src/test/scala/io/getquill/CalibanIntegrationSpec.scala index 5eadbe141..4da53123f 100644 --- a/quill-caliban/src/test/scala/io/getquill/CalibanIntegrationSpec.scala +++ b/quill-caliban/src/test/scala/io/getquill/CalibanIntegrationSpec.scala @@ -17,32 +17,31 @@ class CalibanIntegrationSpec extends CalibanSpec { object Dao { def personAddress(columns: List[String], filters: Map[String, String]): ZIO[Any, Throwable, List[PersonAddress]] = Ctx.run { - query[PersonT].leftJoin(query[AddressT]).on((p, a) => p.id == a.ownerId) + query[PersonT] + .leftJoin(query[AddressT]) + .on((p, a) => p.id == a.ownerId) .map((p, a) => PersonAddress(p.id, p.first, p.last, p.age, a.map(_.street))) - .filterByKeys(filters) - //.filterColumns(columns) // // + .filterByKeys(filters) + // .filterColumns(columns) // // .take(10) - }.provideLayer(zioDS).tap(list => { + }.provideLayer(zioDS).tap { list => println(s"Results: $list for columns: $columns") ZIO.unit - }) + } } } case class Queries( - personAddressFlat: Field => (ProductArgs[FlatSchema.PersonAddress] => Task[List[FlatSchema.PersonAddress]]), + personAddressFlat: Field => (ProductArgs[FlatSchema.PersonAddress] => Task[List[FlatSchema.PersonAddress]]) ) val api = graphQL( - RootResolver( - Queries( - personAddressFlat => - (productArgs => - Flat.Dao.personAddress(quillColumns(personAddressFlat), productArgs.keyValues) - ) - ) + RootResolver( + Queries(personAddressFlat => + (productArgs => Flat.Dao.personAddress(quillColumns(personAddressFlat), productArgs.keyValues)) ) ) + ) "Caliban integration should work for flat object" - { "with no filters" in { @@ -82,4 +81,4 @@ class CalibanIntegrationSpec extends CalibanSpec { unsafeRunQuery(query) mustEqual """{"personAddressFlat":[{"id":1,"last":"A","street":"123 St"}]}""" } } -} \ No newline at end of file +} diff --git a/quill-caliban/src/test/scala/io/getquill/CalibanSpec.scala b/quill-caliban/src/test/scala/io/getquill/CalibanSpec.scala index 4824abd05..f4968ba12 100644 --- a/quill-caliban/src/test/scala/io/getquill/CalibanSpec.scala +++ b/quill-caliban/src/test/scala/io/getquill/CalibanSpec.scala @@ -19,11 +19,10 @@ trait CalibanSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAll { override def beforeAll() = { import FlatSchema._ (for { // - _ <- Ctx.run(sql"TRUNCATE TABLE AddressT, PersonT RESTART IDENTITY".as[Delete[PersonT]]) - _ <- Ctx.run(liftQuery(ExampleData.people).foreach(row => query[PersonT].insertValue(row))) - _ <- Ctx.run(liftQuery(ExampleData.addresses).foreach(row => query[AddressT].insertValue(row))) - } yield () - ).provideLayer(zioDS).unsafeRunSync() + _ <- Ctx.run(sql"TRUNCATE TABLE AddressT, PersonT RESTART IDENTITY".as[Delete[PersonT]]) + _ <- Ctx.run(liftQuery(ExampleData.people).foreach(row => query[PersonT].insertValue(row))) + _ <- Ctx.run(liftQuery(ExampleData.addresses).foreach(row => query[AddressT].insertValue(row))) + } yield 
()).provideLayer(zioDS).unsafeRunSync() } // override def afterAll() = { @@ -45,8 +44,7 @@ trait CalibanSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAll { (for { interpreter <- api.interpreter result <- interpreter.execute(queryString) - } yield (result) - ).tapError{ e => + } yield (result)).tapError { e => fail("GraphQL Validation Error", e) ZIO.unit }.unsafeRunSync() @@ -56,4 +54,4 @@ trait CalibanSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAll { else output.data.toString } // end unsafeRunQuery -} \ No newline at end of file +} diff --git a/quill-caliban/src/test/scala/io/getquill/Schema.scala b/quill-caliban/src/test/scala/io/getquill/Schema.scala index 3ebaf771f..845ddb1c8 100644 --- a/quill-caliban/src/test/scala/io/getquill/Schema.scala +++ b/quill-caliban/src/test/scala/io/getquill/Schema.scala @@ -20,7 +20,7 @@ object FlatSchema { List( PersonAddress(1, "One", "A", 44, Some("123 St")), PersonAddress(2, "Two", "B", 55, Some("123 St")), - PersonAddress(3, "Three", "C", 66, None), + PersonAddress(3, "Three", "C", 66, None) ) } } @@ -47,6 +47,7 @@ object NestedSchema { List( PersonAddressNested(1, Name("One", "A"), 44, Some("123 St")), PersonAddressNested(2, Name("Two", "B"), 55, Some("123 St")), - PersonAddressNested(3, Name("Three", "C"), 66, None), - ) } + PersonAddressNested(3, Name("Three", "C"), 66, None) + ) + } } diff --git a/quill-caliban/src/test/scala/io/getquill/example/CalibanExample.scala b/quill-caliban/src/test/scala/io/getquill/example/CalibanExample.scala index ff0c0e5a8..d637864a8 100644 --- a/quill-caliban/src/test/scala/io/getquill/example/CalibanExample.scala +++ b/quill-caliban/src/test/scala/io/getquill/example/CalibanExample.scala @@ -11,7 +11,7 @@ import io.getquill.context.qzio.ImplicitSyntax._ import io.getquill.context.ZioJdbc._ import io.getquill.util.LoadConfig import zio.Console.printLine -import zio.{ ZIOApp, ExitCode, URIO, Task } +import zio.{ZIOApp, ExitCode, URIO, Task} import java.io.Closeable import javax.sql.DataSource @@ -23,40 +23,41 @@ import io.getquill.util.ContextLogger import io.getquill import io.getquill.FlatSchema._ - object Dao { case class PersonAddressPlanQuery(plan: String, pa: List[PersonAddress]) private val logger = ContextLogger(classOf[Dao.type]) object Ctx extends PostgresZioJdbcContext(Literal) import Ctx._ - lazy val ds = JdbcContextConfig(LoadConfig("testPostgresDB")).dataSource + lazy val ds = JdbcContextConfig(LoadConfig("testPostgresDB")).dataSource given Implicit[DataSource] = Implicit(ds) inline def q(inline columns: List[String], inline filters: Map[String, String]) = quote { - query[PersonT].leftJoin(query[AddressT]).on((p, a) => p.id == a.ownerId) + query[PersonT] + .leftJoin(query[AddressT]) + .on((p, a) => p.id == a.ownerId) .map((p, a) => PersonAddress(p.id, p.first, p.last, p.age, a.map(_.street))) .filterColumns(columns) .filterByKeys(filters) .take(10) } inline def plan(inline columns: List[String], inline filters: Map[String, String]) = - quote { sql"EXPLAIN ${q(columns, filters)}".pure.as[Query[String]] } + quote(sql"EXPLAIN ${q(columns, filters)}".pure.as[Query[String]]) def personAddress(columns: List[String], filters: Map[String, String]) = { println(s"Getting columns: $columns") - run(q(columns, filters)).implicitDS.mapError(e => { + run(q(columns, filters)).implicitDS.mapError { e => logger.underlying.error("personAddress query failed", e) e - }) + } } def personAddressPlan(columns: List[String], filters: Map[String, String]) = - run(plan(columns, filters), 
OuterSelectWrap.Never).map(_.mkString("\n")).implicitDS.mapError(e => { + run(plan(columns, filters), OuterSelectWrap.Never).map(_.mkString("\n")).implicitDS.mapError { e => logger.underlying.error("personAddressPlan query failed", e) e - }) + } def resetDatabase() = (for { @@ -69,42 +70,39 @@ object Dao { object CalibanExample extends zio.ZIOAppDefault { case class Queries( - personAddress: Field => (ProductArgs[PersonAddress] => Task[List[PersonAddress]]), - personAddressPlan: Field => (ProductArgs[PersonAddress] => Task[Dao.PersonAddressPlanQuery]) + personAddress: Field => (ProductArgs[PersonAddress] => Task[List[PersonAddress]]), + personAddressPlan: Field => (ProductArgs[PersonAddress] => Task[Dao.PersonAddressPlanQuery]) ) val endpoints = - graphQL( + graphQL( RootResolver( Queries( - personAddress => - (productArgs => - Dao.personAddress(quillColumns(personAddress), productArgs.keyValues) - ), + personAddress => (productArgs => Dao.personAddress(quillColumns(personAddress), productArgs.keyValues)), personAddressPlan => (productArgs => { val cols = quillColumns(personAddressPlan) - (Dao.personAddressPlan(cols, productArgs.keyValues) zip Dao.personAddress(cols, productArgs.keyValues)).map( - (pa, plan) => Dao.PersonAddressPlanQuery(pa, plan) - ) + (Dao.personAddressPlan(cols, productArgs.keyValues) zip Dao.personAddress(cols, productArgs.keyValues)) + .map((pa, plan) => Dao.PersonAddressPlanQuery(pa, plan)) }) ) ) ).interpreter val myApp = for { - _ <- Dao.resetDatabase() + _ <- Dao.resetDatabase() interpreter <- endpoints - _ <- Server.start( - port = 8088, - http = Http.collectHttp[Request] { case _ -> !! / "api" / "graphql" => - ZHttpAdapter.makeHttpService(interpreter) - } - ) - .forever + _ <- Server + .start( + port = 8088, + http = Http.collectHttp[Request] { case _ -> !! 
/ "api" / "graphql" => + ZHttpAdapter.makeHttpService(interpreter) + } + ) + .forever } yield () override def run = myApp.exitCode -} // end CalibanExample \ No newline at end of file +} // end CalibanExample diff --git a/quill-caliban/src/test/scala/io/getquill/example/CalibanExampleNested.scala b/quill-caliban/src/test/scala/io/getquill/example/CalibanExampleNested.scala index f1f717d4c..54d2e248d 100644 --- a/quill-caliban/src/test/scala/io/getquill/example/CalibanExampleNested.scala +++ b/quill-caliban/src/test/scala/io/getquill/example/CalibanExampleNested.scala @@ -11,7 +11,7 @@ import io.getquill.context.qzio.ImplicitSyntax._ import io.getquill.context.ZioJdbc._ import io.getquill.util.LoadConfig import zio.Console.printLine -import zio.{ ZIOApp, ExitCode, URIO, Task } +import zio.{ZIOApp, ExitCode, URIO, Task} import java.io.Closeable import javax.sql.DataSource @@ -22,40 +22,41 @@ import io.getquill.CalibanIntegration._ import io.getquill.util.ContextLogger import io.getquill.NestedSchema._ - object DaoNested { case class PersonAddressPlanQuery(plan: String, pa: List[PersonAddressNested]) private val logger = ContextLogger(classOf[DaoNested.type]) object Ctx extends PostgresZioJdbcContext(Literal) import Ctx._ - lazy val ds = JdbcContextConfig(LoadConfig("testPostgresDB")).dataSource + lazy val ds = JdbcContextConfig(LoadConfig("testPostgresDB")).dataSource given Implicit[DataSource] = Implicit(ds) inline def q(inline columns: List[String], inline filters: Map[String, String]) = quote { - query[PersonT].leftJoin(query[AddressT]).on((p, a) => p.id == a.ownerId) + query[PersonT] + .leftJoin(query[AddressT]) + .on((p, a) => p.id == a.ownerId) .map((p, a) => PersonAddressNested(p.id, p.name, p.age, a.map(_.street))) .filterColumns(columns) .filterByKeys(filters) .take(10) } inline def plan(inline columns: List[String], inline filters: Map[String, String]) = - quote { sql"EXPLAIN ${q(columns, filters)}".pure.as[Query[String]] } + quote(sql"EXPLAIN ${q(columns, filters)}".pure.as[Query[String]]) def personAddress(columns: List[String], filters: Map[String, String]) = { println(s"Getting columns: $columns") - run(q(columns, filters)).implicitDS.mapError(e => { + run(q(columns, filters)).implicitDS.mapError { e => logger.underlying.error("personAddress query failed", e) e - }) + } } def personAddressPlan(columns: List[String], filters: Map[String, String]) = - run(plan(columns, filters), OuterSelectWrap.Never).map(_.mkString("\n")).implicitDS.mapError(e => { + run(plan(columns, filters), OuterSelectWrap.Never).map(_.mkString("\n")).implicitDS.mapError { e => logger.underlying.error("personAddressPlan query failed", e) e - }) + } def resetDatabase() = (for { @@ -69,43 +70,42 @@ object CalibanExampleNested extends zio.ZIOAppDefault { private val logger = ContextLogger(classOf[CalibanExampleNested.type]) case class Queries( - personAddress: Field => (ProductArgs[PersonAddressNested] => Task[List[PersonAddressNested]]), - personAddressPlan: Field => (ProductArgs[PersonAddressNested] => Task[DaoNested.PersonAddressPlanQuery]) + personAddress: Field => (ProductArgs[PersonAddressNested] => Task[List[PersonAddressNested]]), + personAddressPlan: Field => (ProductArgs[PersonAddressNested] => Task[DaoNested.PersonAddressPlanQuery]) ) val endpoints = - graphQL( + graphQL( RootResolver( Queries( - personAddress => - (productArgs => - DaoNested.personAddress(quillColumns(personAddress), productArgs.keyValues) - ), + personAddress => (productArgs => DaoNested.personAddress(quillColumns(personAddress), 
productArgs.keyValues)), personAddressPlan => (productArgs => { val cols = quillColumns(personAddressPlan) logger.underlying.info(s"Selected Columns: ${cols}") - (DaoNested.personAddressPlan(cols, productArgs.keyValues) zip DaoNested.personAddress(cols, productArgs.keyValues)).map( - (pa, plan) => DaoNested.PersonAddressPlanQuery(pa, plan) - ) + (DaoNested.personAddressPlan(cols, productArgs.keyValues) zip DaoNested.personAddress( + cols, + productArgs.keyValues + )).map((pa, plan) => DaoNested.PersonAddressPlanQuery(pa, plan)) }) ) ) ).interpreter val myApp = for { - _ <- DaoNested.resetDatabase() + _ <- DaoNested.resetDatabase() interpreter <- endpoints - _ <- Server.start( - port = 8088, - http = Http.collectHttp[Request] { case _ -> !! / "api" / "graphql" => - ZHttpAdapter.makeHttpService(interpreter) - } - ) - .forever + _ <- Server + .start( + port = 8088, + http = Http.collectHttp[Request] { case _ -> !! / "api" / "graphql" => + ZHttpAdapter.makeHttpService(interpreter) + } + ) + .forever } yield () override def run = myApp.exitCode -} // end CalibanExampleNested \ No newline at end of file +} // end CalibanExampleNested diff --git a/quill-cassandra-zio/src/main/scala/io/getquill/CassandraZioContext.scala b/quill-cassandra-zio/src/main/scala/io/getquill/CassandraZioContext.scala index 4265327c9..03129875b 100644 --- a/quill-cassandra-zio/src/main/scala/io/getquill/CassandraZioContext.scala +++ b/quill-cassandra-zio/src/main/scala/io/getquill/CassandraZioContext.scala @@ -1,14 +1,14 @@ package io.getquill -import com.datastax.oss.driver.api.core.cql.{ AsyncResultSet, BoundStatement, Row } +import com.datastax.oss.driver.api.core.cql.{AsyncResultSet, BoundStatement, Row} import io.getquill.CassandraZioContext._ import io.getquill.context.ExecutionInfo -import io.getquill.context.cassandra.{ CassandraRowContext, CqlIdiom } +import io.getquill.context.cassandra.{CassandraRowContext, CqlIdiom} import io.getquill.context.qzio.ZioContext import io.getquill.util.Messages.fail import io.getquill.util.ContextLogger import zio.stream.ZStream -import zio.{ Chunk, ChunkBuilder, ZEnvironment, ZIO } +import zio.{Chunk, ChunkBuilder, ZEnvironment, ZIO} import scala.jdk.CollectionConverters._ import scala.util.Try @@ -19,56 +19,56 @@ import io.getquill.context.AsyncFutureCache import scala.annotation.targetName object CassandraZioContext { - type CIO[T] = ZIO[CassandraZioSession, Throwable, T] + type CIO[T] = ZIO[CassandraZioSession, Throwable, T] type CStream[T] = ZStream[CassandraZioSession, Throwable, T] } /** - * Quill context that executes Cassandra queries inside of ZIO. Unlike most other contexts - * that require passing in a Data Source, this context takes in a `ZioCassandraSession` - * as a resource dependency which can be provided later (see the `ZioCassandraSession` object for helper methods - * that assist in doing this). + * Quill context that executes Cassandra queries inside of ZIO. Unlike most + * other contexts that require passing in a Data Source, this context takes in a + * `ZioCassandraSession` as a resource dependency which can be provided later + * (see the `ZioCassandraSession` object for helper methods that assist in doing + * this). * * The resource dependency itself is just a Has[ZioCassandraSession] * - * Various methods in the `io.getquill.ZioCassandraSession` can assist in simplifying it's creation, for example, you can - * provide a `Config` object instead of a `ZioCassandraSession` like this - * (note that the resulting ZioCassandraSession has a closing bracket). 
- * {{ - * val zioSession = - * ZioCassandraSession.fromPrefix("testStreamDB") - * }} + * Various methods in the `io.getquill.ZioCassandraSession` can assist in + * simplifying its creation, for example, you can provide a `Config` object + * instead of a `ZioCassandraSession` like this (note that the resulting + * ZioCassandraSession has a closing bracket). {{ val zioSession = + * ZioCassandraSession.fromPrefix("testStreamDB") }} * - * If you are using a Plain Scala app however, you will need to manually run it e.g. using zio.Runtime - * {{ - * Runtime.default.unsafeRun(MyZioContext.run(query[Person]).provideCustomLayer(zioSession)) + * If you are using a Plain Scala app however, you will need to manually run it + * e.g. using zio.Runtime {{ + * Runtime.default.unsafeRun(MyZioContext.run(query[Person]).provideCustomLayer(zioSession)) * }} * - * ProtoQuill Note: Zio Cassandra context does not implement `prepare`. This it can extend StandardContext - * in Scala2-Quill because presence of `prepare___` methods is not enforced. Due to stricter type requirements - * in Dotty however, this is not allowed here. + * ProtoQuill Note: Zio Cassandra context does not implement `prepare`. Thus it + * can extend StandardContext in Scala2-Quill because presence of `prepare___` + * methods is not enforced. Due to stricter type requirements in Dotty however, + * this is not allowed here. */ class CassandraZioContext[+N <: NamingStrategy](val naming: N) - extends CassandraStandardContext[N] - with ZioContext[CqlIdiom, N] - with Context[CqlIdiom, N] { + extends CassandraStandardContext[N] + with ZioContext[CqlIdiom, N] + with Context[CqlIdiom, N] { private val logger = ContextLogger(classOf[CassandraZioContext[_]]) - override type Error = Throwable + override type Error = Throwable override type Environment = CassandraZioSession override type StreamResult[T] = CStream[T] override type RunActionResult = Unit - override type Result[T] = CIO[T] + override type Result[T] = CIO[T] - override type RunQueryResult[T] = List[T] + override type RunQueryResult[T] = List[T] override type RunQuerySingleResult[T] = T - override type RunBatchActionResult = Unit + override type RunBatchActionResult = Unit override type PrepareRow = BoundStatement - override type ResultRow = Row - override type Session = CassandraZioSession + override type ResultRow = Row + override type Session = CassandraZioSession // Don't need a Runner method because for the Zio Cassandra Context the // ExecutionContext is provided by the ZIO runtime.
@@ -76,15 +76,23 @@ class CassandraZioContext[+N <: NamingStrategy](val naming: N) override protected def context: Runner = () @targetName("runQueryDefault") - inline def run[T](inline quoted: Quoted[Query[T]]): ZIO[CassandraZioSession, Throwable, List[T]] = InternalApi.runQueryDefault(quoted) + inline def run[T](inline quoted: Quoted[Query[T]]): ZIO[CassandraZioSession, Throwable, List[T]] = + InternalApi.runQueryDefault(quoted) @targetName("runQuery") - inline def run[T](inline quoted: Quoted[Query[T]], inline wrap: OuterSelectWrap): ZIO[CassandraZioSession, Throwable, List[T]] = InternalApi.runQuery(quoted, wrap) + inline def run[T]( + inline quoted: Quoted[Query[T]], + inline wrap: OuterSelectWrap + ): ZIO[CassandraZioSession, Throwable, List[T]] = InternalApi.runQuery(quoted, wrap) @targetName("runQuerySingle") - inline def run[T](inline quoted: Quoted[T]): ZIO[CassandraZioSession, Throwable, T] = InternalApi.runQuerySingle(quoted) + inline def run[T](inline quoted: Quoted[T]): ZIO[CassandraZioSession, Throwable, T] = + InternalApi.runQuerySingle(quoted) @targetName("runAction") - inline def run[E](inline quoted: Quoted[Action[E]]): ZIO[CassandraZioSession, Throwable, Unit] = InternalApi.runAction(quoted) + inline def run[E](inline quoted: Quoted[Action[E]]): ZIO[CassandraZioSession, Throwable, Unit] = + InternalApi.runAction(quoted) @targetName("runBatchAction") - inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): ZIO[CassandraZioSession, Throwable, Unit] = InternalApi.runBatchAction(quoted, 1) + inline def run[I, A <: Action[I] & QAC[I, Nothing]]( + inline quoted: Quoted[BatchAction[A]] + ): ZIO[CassandraZioSession, Throwable, Unit] = InternalApi.runBatchAction(quoted, 1) protected def page(rs: AsyncResultSet): CIO[Chunk[Row]] = ZIO.succeed { val builder = ChunkBuilder.make[Row](rs.remaining()) @@ -96,16 +104,12 @@ class CassandraZioContext[+N <: NamingStrategy](val naming: N) private[getquill] def execute(cql: String, prepare: Prepare, csession: CassandraZioSession, fetchSize: Option[Int]) = simpleBlocking { - prepareRowAndLog(cql, prepare) - .mapAttempt { p => - fetchSize match { - case Some(value) => p.setPageSize(value) - case None => p - } + prepareRowAndLog(cql, prepare).mapAttempt { p => + fetchSize match { + case Some(value) => p.setPageSize(value) + case None => p } - .flatMap(p => { - ZIO.fromCompletionStage(csession.session.executeAsync(p)) - }) + }.flatMap(p => ZIO.fromCompletionStage(csession.session.executeAsync(p))) } val streamBlocker: ZStream[Any, Nothing, Any] = @@ -117,21 +121,26 @@ class CassandraZioContext[+N <: NamingStrategy](val naming: N) } yield () } - def streamQuery[T](fetchSize: Option[Int], cql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)(info: ExecutionInfo, dc: Runner) = { + def streamQuery[T]( + fetchSize: Option[Int], + cql: String, + prepare: Prepare = identityPrepare, + extractor: Extractor[T] = identityExtractor + )(info: ExecutionInfo, dc: Runner) = { val stream = for { csession <- ZStream.service[CassandraZioSession] - rs <- ZStream.fromZIO(execute(cql, prepare, csession, fetchSize)) + rs <- ZStream.fromZIO(execute(cql, prepare, csession, fetchSize)) row <- ZStream.unfoldChunkZIO(rs) { rs => - // keep taking pages while chunk sizes are non-zero - page(rs).flatMap { chunk => - (chunk.nonEmpty, rs.hasMorePages) match { - case (true, true) => ZIO.fromCompletionStage(rs.fetchNextPage()).map(rs => Some((chunk, rs))) - case (true, false) => ZIO.some((chunk, rs)) - 
case (_, _) => ZIO.none - } - } - } + // keep taking pages while chunk sizes are non-zero + page(rs).flatMap { chunk => + (chunk.nonEmpty, rs.hasMorePages) match { + case (true, true) => ZIO.fromCompletionStage(rs.fetchNextPage()).map(rs => Some((chunk, rs))) + case (true, false) => ZIO.some((chunk, rs)) + case (_, _) => ZIO.none + } + } + } } yield extractor(row, csession) // Run the entire chunking flow on the blocking executor @@ -142,38 +151,45 @@ class CassandraZioContext[+N <: NamingStrategy](val naming: N) private[getquill] def simpleBlocking[R, E, A](zio: ZIO[R, E, A]): ZIO[R, E, A] = ZIO.blocking(zio) - def executeQuery[T](cql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)(info: ExecutionInfo, dc: Runner): CIO[List[T]] = simpleBlocking { + def executeQuery[T](cql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)( + info: ExecutionInfo, + dc: Runner + ): CIO[List[T]] = simpleBlocking { streamQuery[T](None, cql, prepare, extractor)(info, dc).runCollect.map(_.toList) } - def executeQuerySingle[T](cql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)(info: ExecutionInfo, dc: Runner): CIO[T] = simpleBlocking { + def executeQuerySingle[T]( + cql: String, + prepare: Prepare = identityPrepare, + extractor: Extractor[T] = identityExtractor + )(info: ExecutionInfo, dc: Runner): CIO[T] = simpleBlocking { for { - csession <- ZIO.service[CassandraZioSession] - rs <- execute(cql, prepare, csession, Some(1)) //pull only one record from the DB explicitly. - rows <- ZIO.attempt(rs.currentPage()) + csession <- ZIO.service[CassandraZioSession] + rs <- execute(cql, prepare, csession, Some(1)) // pull only one record from the DB explicitly. 
+ rows <- ZIO.attempt(rs.currentPage()) singleRow <- ZIO.attempt(handleSingleResult(cql, rows.asScala.map(row => extractor(row, csession)).toList)) } yield singleRow } - def executeAction(cql: String, prepare: Prepare = identityPrepare)(info: ExecutionInfo, dc: Runner): CIO[Unit] = simpleBlocking { - for { - csession <- ZIO.service[CassandraZioSession] - r <- prepareRowAndLog(cql, prepare).provideEnvironment(ZEnvironment(csession)) - _ <- ZIO.fromCompletionStage(csession.session.executeAsync(r)) - } yield () - } + def executeAction(cql: String, prepare: Prepare = identityPrepare)(info: ExecutionInfo, dc: Runner): CIO[Unit] = + simpleBlocking { + for { + csession <- ZIO.service[CassandraZioSession] + r <- prepareRowAndLog(cql, prepare).provideEnvironment(ZEnvironment(csession)) + _ <- ZIO.fromCompletionStage(csession.session.executeAsync(r)) + } yield () + } - //TODO: Cassandra batch actions applicable to insert/update/delete and described here: + // TODO: Cassandra batch actions applicable to insert/update/delete and described here: // https://docs.datastax.com/en/dse/6.0/cql/cql/cql_reference/cql_commands/cqlBatch.html def executeBatchAction(groups: List[BatchGroup])(info: ExecutionInfo, dc: Runner): CIO[Unit] = simpleBlocking { for { env <- ZIO.service[CassandraZioSession] _ <- { val batchGroups = - groups.flatMap { - case BatchGroup(cql, prepare) => - prepare - .map(prep => executeAction(cql, prep)(info, dc).provideEnvironment(ZEnvironment(env))) + groups.flatMap { case BatchGroup(cql, prepare) => + prepare + .map(prep => executeAction(cql, prep)(info, dc).provideEnvironment(ZEnvironment(env))) } ZIO.collectAll(batchGroups) } @@ -182,10 +198,10 @@ class CassandraZioContext[+N <: NamingStrategy](val naming: N) private[getquill] def prepareRowAndLog(cql: String, prepare: Prepare = identityPrepare): CIO[PrepareRow] = for { - env <- ZIO.environment[CassandraZioSession] + env <- ZIO.environment[CassandraZioSession] csession = env.get[CassandraZioSession] boundStatement <- { - ZIO.fromFuture { implicit ec => csession.prepareAsync(cql) } + ZIO.fromFuture(implicit ec => csession.prepareAsync(cql)) .mapAttempt(row => prepare(row, csession)) .map(p => p._2) } @@ -193,14 +209,15 @@ class CassandraZioContext[+N <: NamingStrategy](val naming: N) def probingSession: Option[CassandraZioSession] = None - def probe(statement: String): scala.util.Try[_] = { + def probe(statement: String): scala.util.Try[_] = probingSession match { case Some(csession) => Try(csession.prepare(statement)) case None => Try(()) } - } - override def close(): Unit = fail("Zio Cassandra Session does not need to be closed because it does not keep internal state.") + override def close(): Unit = fail( + "Zio Cassandra Session does not need to be closed because it does not keep internal state." 
+ ) } diff --git a/quill-cassandra-zio/src/main/scala/io/getquill/CassandraZioSession.scala b/quill-cassandra-zio/src/main/scala/io/getquill/CassandraZioSession.scala index 60533a483..82b30970e 100644 --- a/quill-cassandra-zio/src/main/scala/io/getquill/CassandraZioSession.scala +++ b/quill-cassandra-zio/src/main/scala/io/getquill/CassandraZioSession.scala @@ -2,15 +2,18 @@ package io.getquill import com.datastax.oss.driver.api.core.CqlSession import com.typesafe.config.Config -import io.getquill.context.{ AsyncFutureCache, CassandraSession, SyncCache } +import io.getquill.context.{AsyncFutureCache, CassandraSession, SyncCache} import io.getquill.util.LoadConfig -import zio.{ ZIO, ZLayer } +import zio.{ZIO, ZLayer} import scala.tools.nsc.interpreter.Naming.sessionNames case class CassandraZioSession( - override val session: CqlSession, + override val session: CqlSession, override val preparedStatementCacheSize: Long -) extends CassandraSession with SyncCache with AsyncFutureCache with AutoCloseable +) extends CassandraSession + with SyncCache + with AsyncFutureCache + with AutoCloseable object CassandraZioSession { val live: ZLayer[CassandraContextConfig, Throwable, CassandraZioSession] = @@ -19,8 +22,8 @@ object CassandraZioSession { config <- ZIO.service[CassandraContextConfig] // Evaluate the configuration inside of 'effect' and then create the session from it session <- ZIO.fromAutoCloseable( - ZIO.attempt(CassandraZioSession(config.session, config.preparedStatementCacheSize)) - ) + ZIO.attempt(CassandraZioSession(config.session, config.preparedStatementCacheSize)) + ) } yield session } @@ -30,4 +33,4 @@ object CassandraZioSession { def fromConfig(config: Config) = fromContextConfig(CassandraContextConfig(config)) // Call the by-name constructor for the construction to fail inside of the effect if it fails def fromPrefix(configPrefix: String) = fromContextConfig(CassandraContextConfig(LoadConfig(configPrefix))) -} \ No newline at end of file +} diff --git a/quill-cassandra-zio/src/main/scala/io/getquill/cassandrazio/Quill.scala b/quill-cassandra-zio/src/main/scala/io/getquill/cassandrazio/Quill.scala index c0027139c..82147791c 100644 --- a/quill-cassandra-zio/src/main/scala/io/getquill/cassandrazio/Quill.scala +++ b/quill-cassandra-zio/src/main/scala/io/getquill/cassandrazio/Quill.scala @@ -22,26 +22,26 @@ object Quill { } case class Cassandra[+N <: NamingStrategy](val naming: N, session: CassandraZioSession) - extends CassandraStandardContext[N] - with ZioContext[CqlIdiom, N] - with Context[CqlIdiom, N] { + extends CassandraStandardContext[N] + with ZioContext[CqlIdiom, N] + with Context[CqlIdiom, N] { private val logger = ContextLogger(classOf[Quill.Cassandra[_]]) - override type Error = Throwable + override type Error = Throwable override type Environment = Any override type StreamResult[T] = ZStream[Any, Throwable, T] override type RunActionResult = Unit - override type Result[T] = ZIO[Any, Throwable, T] + override type Result[T] = ZIO[Any, Throwable, T] - override type RunQueryResult[T] = List[T] + override type RunQueryResult[T] = List[T] override type RunQuerySingleResult[T] = T - override type RunBatchActionResult = Unit + override type RunBatchActionResult = Unit override type PrepareRow = BoundStatement - override type ResultRow = Row - override type Session = CassandraZioSession + override type ResultRow = Row + override type Session = CassandraZioSession // Don't need a Runner method because for the Zio Cassandra Context the // ExecutionContext is provided by the ZIO 
runtime. @@ -51,25 +51,44 @@ object Quill { val underlying: CassandraZioContext[N] = new CassandraZioContext[N](naming) @targetName("runQueryDefault") - inline def run[T](inline quoted: Quoted[Query[T]]): ZIO[Any, Throwable, List[T]] = InternalApi.runQueryDefault(quoted) + inline def run[T](inline quoted: Quoted[Query[T]]): ZIO[Any, Throwable, List[T]] = + InternalApi.runQueryDefault(quoted) @targetName("runQuery") - inline def run[T](inline quoted: Quoted[Query[T]], inline wrap: OuterSelectWrap): ZIO[Any, Throwable, List[T]] = InternalApi.runQuery(quoted, wrap) + inline def run[T](inline quoted: Quoted[Query[T]], inline wrap: OuterSelectWrap): ZIO[Any, Throwable, List[T]] = + InternalApi.runQuery(quoted, wrap) @targetName("runQuerySingle") inline def run[T](inline quoted: Quoted[T]): ZIO[Any, Throwable, T] = InternalApi.runQuerySingle(quoted) @targetName("runAction") inline def run[E](inline quoted: Quoted[Action[E]]): ZIO[Any, Throwable, Unit] = InternalApi.runAction(quoted) @targetName("runBatchAction") - inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): ZIO[Any, Throwable, Unit] = InternalApi.runBatchAction(quoted, 1) - - def streamQuery[T](fetchSize: Option[Int], cql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)(info: ExecutionInfo, dc: Runner): ZStream[Any, Throwable, T] = + inline def run[I, A <: Action[I] & QAC[I, Nothing]]( + inline quoted: Quoted[BatchAction[A]] + ): ZIO[Any, Throwable, Unit] = InternalApi.runBatchAction(quoted, 1) + + def streamQuery[T]( + fetchSize: Option[Int], + cql: String, + prepare: Prepare = identityPrepare, + extractor: Extractor[T] = identityExtractor + )(info: ExecutionInfo, dc: Runner): ZStream[Any, Throwable, T] = onSessionStream(underlying.streamQuery(fetchSize, cql, prepare, extractor)(info, dc)) - def executeQuery[T](cql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)(info: ExecutionInfo, dc: Runner): ZIO[Any, Throwable, List[T]] = + def executeQuery[T](cql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)( + info: ExecutionInfo, + dc: Runner + ): ZIO[Any, Throwable, List[T]] = onSession(underlying.executeQuery(cql, prepare, extractor)(info, dc)) - def executeQuerySingle[T](cql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)(info: ExecutionInfo, dc: Runner): ZIO[Any, Throwable, T] = + def executeQuerySingle[T]( + cql: String, + prepare: Prepare = identityPrepare, + extractor: Extractor[T] = identityExtractor + )(info: ExecutionInfo, dc: Runner): ZIO[Any, Throwable, T] = onSession(underlying.executeQuerySingle(cql, prepare, extractor)(info, dc)) - def executeAction(cql: String, prepare: Prepare = identityPrepare)(info: ExecutionInfo, dc: Runner): ZIO[Any, Throwable, Unit] = + def executeAction(cql: String, prepare: Prepare = identityPrepare)( + info: ExecutionInfo, + dc: Runner + ): ZIO[Any, Throwable, Unit] = onSession(underlying.executeAction(cql, prepare)(info, dc)) def executeBatchAction(groups: List[BatchGroup])(info: ExecutionInfo, dc: Runner): ZIO[Any, Throwable, Unit] = diff --git a/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/DecodeNullSpec.scala b/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/DecodeNullSpec.scala index bce2a1e7e..354e8f447 100644 --- a/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/DecodeNullSpec.scala +++ 
b/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/DecodeNullSpec.scala @@ -11,26 +11,27 @@ class DecodeNullSpec extends ZioCassandraSpec { val ret = for { - _ <- testZioDB.run(writeEntities.delete) - _ <- testZioDB.run(writeEntities.insertValue(lift(insertee))) + _ <- testZioDB.run(writeEntities.delete) + _ <- testZioDB.run(writeEntities.insertValue(lift(insertee))) result <- testZioDB.run(query[DecodeNullTestEntity]) } yield { result } - result(ret.foldCause( - cause => { - cause.isDie must equal(true) - cause.dieOption match { - case Some(e: Exception) => - e.isInstanceOf[IllegalStateException] must equal(true) - case _ => - fail("Expected Fatal Error to be here (and to be a IllegalStateException") - } - }, - success => - fail("Expected Exception IllegalStateException but operation succeeded") - )) + result( + ret.foldCause( + cause => { + cause.isDie must equal(true) + cause.dieOption match { + case Some(e: Exception) => + e.isInstanceOf[IllegalStateException] must equal(true) + case _ => + fail("Expected Fatal Error to be here (and to be an IllegalStateException)") + } + }, + success => fail("Expected Exception IllegalStateException but operation succeeded") + ) + ) () } } diff --git a/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/EncodingSpec.scala b/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/EncodingSpec.scala index 60cede849..efb990870 100644 --- a/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/EncodingSpec.scala +++ b/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/EncodingSpec.scala @@ -9,8 +9,8 @@ class EncodingSpec extends EncodingSpecHelper with ZioCassandraSpec { import testZioDB._ val ret = for { - _ <- testZioDB.run(query[EncodingTestEntity].delete) - _ <- testZioDB.run(liftQuery(insertValues).foreach(e => query[EncodingTestEntity].insertValue(e))) + _ <- testZioDB.run(query[EncodingTestEntity].delete) + _ <- testZioDB.run(liftQuery(insertValues).foreach(e => query[EncodingTestEntity].insertValue(e))) result <- testZioDB.run(query[EncodingTestEntity]) } yield { result @@ -23,14 +23,13 @@ class EncodingSpec extends EncodingSpecHelper with ZioCassandraSpec { "encodes collections" - { "stream" in { import testZioDB._ - inline def q = quote { - (list: Query[Int]) => - query[EncodingTestEntity].filter(t => list.contains(t.id)) + inline def q = quote { (list: Query[Int]) => + query[EncodingTestEntity].filter(t => list.contains(t.id)) } val ret = for { - _ <- testZioDB.run(query[EncodingTestEntity].delete) - _ <- testZioDB.run(liftQuery(insertValues).foreach(e => query[EncodingTestEntity].insertValue(e))) + _ <- testZioDB.run(query[EncodingTestEntity].delete) + _ <- testZioDB.run(liftQuery(insertValues).foreach(e => query[EncodingTestEntity].insertValue(e))) result <- testZioDB.run(q(liftQuery(insertValues.map(_.id)))) } yield { result diff --git a/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/StreamingWithFetchSpec.scala b/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/StreamingWithFetchSpec.scala index 9ce9f5962..5d09cb396 100644 --- a/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/StreamingWithFetchSpec.scala +++ b/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/StreamingWithFetchSpec.scala @@ -24,7 +24,7 @@ class StreamingWithFetchSpec extends ZioCassandraSpec with QueryResultTypeCassan result( context.run(liftQuery(entities).foreach(e => insert(e))) *> context.stream(selectAll,
7).runCollect.map(_.toList.size) - ) must equal(entities.size) //must contain theSameElementsAs entities + ) must equal(entities.size) // must contain theSameElementsAs entities } "with entities == fetch size" in { val entities = produceEntities(11) diff --git a/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/UdtEncodingSessionContextSpec.scala b/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/UdtEncodingSessionContextSpec.scala index a5df7dfff..db8ce776e 100644 --- a/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/UdtEncodingSessionContextSpec.scala +++ b/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/UdtEncodingSessionContextSpec.scala @@ -46,13 +46,19 @@ class UdtEncodingSessionContextSpec extends UdtSpec with ZioCassandraSpec { "without meta" in { case class WithEverything(id: Int, personal: Personal, nameList: List[Name]) - val e = WithEverything(1, Personal(1, "strt", - Name("first", Some("last")), - Some(Name("f", None)), - List("e"), - Set(1, 2), - Map(1 -> "1", 2 -> "2")), - List(Name("first", None))) + val e = WithEverything( + 1, + Personal( + 1, + "strt", + Name("first", Some("last")), + Some(Name("f", None)), + List("e"), + Set(1, 2), + Map(1 -> "1", 2 -> "2") + ), + List(Name("first", None)) + ) ctx.run(query[WithEverything].insertValue(lift(e))).runSyncUnsafe() ctx.run(query[WithEverything].filter(_.id == 1)).runSyncUnsafe().headOption must contain(e) } diff --git a/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/ZioCassandraSpec.scala b/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/ZioCassandraSpec.scala index ad19473e9..36c30c261 100644 --- a/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/ZioCassandraSpec.scala +++ b/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/ZioCassandraSpec.scala @@ -24,16 +24,17 @@ trait ZioCassandraSpec extends Spec { () } - override def afterAll(): Unit = { + override def afterAll(): Unit = pool.close() - } def accumulate[T](stream: ZStream[CassandraZioSession, Throwable, T]): ZIO[CassandraZioSession, Throwable, List[T]] = stream.run(ZSink.collectAll).map(_.toList) def result[T](stream: ZStream[CassandraZioSession, Throwable, T]): List[T] = Unsafe.unsafe { implicit unsafe => - Runtime.default.unsafe.run(stream.run(ZSink.collectAll).map(_.toList).provideEnvironment(ZEnvironment(pool))).getOrThrow() + Runtime.default.unsafe + .run(stream.run(ZSink.collectAll).map(_.toList).provideEnvironment(ZEnvironment(pool))) + .getOrThrow() } def result[T](qzio: ZIO[CassandraZioSession, Throwable, T]): T = diff --git a/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/examples/IdiomaticApp.scala b/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/examples/IdiomaticApp.scala index faea5312e..ad9bf2341 100644 --- a/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/examples/IdiomaticApp.scala +++ b/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/examples/IdiomaticApp.scala @@ -8,12 +8,12 @@ import zio.{ZIO, ZIOAppDefault, ZLayer} object IdiomaticApp extends ZIOAppDefault { import IdiomaticAppData.* - override def run = { + override def run = (for { people <- DataService.getPeople() - _ <- printLine(s"People: ${people}") - joes <- DataService.getPeopleByName("Joe") - _ <- printLine(s"Joes: ${joes}") + _ <- printLine(s"People: ${people}") + joes <- DataService.getPeopleByName("Joe") + _ <- printLine(s"Joes: ${joes}") } 
yield ()) .provide( Quill.CassandraZioSession.fromPrefix("testStreamDB"), @@ -23,5 +23,4 @@ object IdiomaticApp extends ZIOAppDefault { ) .tapError(e => ZIO.succeed(println(s"Error Occurred: ${e}"))) .exitCode - } } diff --git a/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/examples/IdiomaticAppData.scala b/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/examples/IdiomaticAppData.scala index 62b882c19..6d771c2b8 100644 --- a/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/examples/IdiomaticAppData.scala +++ b/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/examples/IdiomaticAppData.scala @@ -15,8 +15,8 @@ object IdiomaticAppData { inline def people = quote { query[Person] } - inline def peopleByName = quote { - (name: String) => people.filter(p => p.name == name).allowFiltering + inline def peopleByName = quote { (name: String) => + people.filter(p => p.name == name).allowFiltering } } @@ -32,7 +32,8 @@ object IdiomaticAppData { def getPeople(): ZIO[Any, Throwable, List[Person]] = quill.run(queryService.people) - def getPeopleByName(name: String): ZIO[Any, Throwable, List[Person]] = quill.run(queryService.peopleByName(lift(name))) + def getPeopleByName(name: String): ZIO[Any, Throwable, List[Person]] = + quill.run(queryService.peopleByName(lift(name))) } object DataService { diff --git a/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/examples/other/ExampleApp.scala b/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/examples/other/ExampleApp.scala index 4125c58a8..9220b0abe 100644 --- a/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/examples/other/ExampleApp.scala +++ b/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/examples/other/ExampleApp.scala @@ -18,8 +18,10 @@ object ExampleApp extends ZIOAppDefault { inline def people = quote { query[Person] } - MyZioPostgresContext.run(people) + MyZioPostgresContext + .run(people) .tap(result => printLine(result.toString)) - .provide(zioSessionLayer).exitCode + .provide(zioSessionLayer) + .exitCode } } diff --git a/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/examples/other/ExampleAppImplicitEnv.scala b/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/examples/other/ExampleAppImplicitEnv.scala index 64a92f11c..af4ba751b 100644 --- a/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/examples/other/ExampleAppImplicitEnv.scala +++ b/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/examples/other/ExampleAppImplicitEnv.scala @@ -18,16 +18,16 @@ object ExampleAppImplicitEnv extends ZIOAppDefault { import Ctx.* implicit val env: Implicit[CassandraZioSession] = Implicit(cs) - def joes = Ctx.run { query[Person].filter(p => p.name == "Joe") }.implicitly - def jills = Ctx.run { query[Person].filter(p => p.name == "Jill") }.implicitly - def alexes = Ctx.run { query[Person].filter(p => p.name == "Alex") }.implicitly + def joes = Ctx.run(query[Person].filter(p => p.name == "Joe")).implicitly + def jills = Ctx.run(query[Person].filter(p => p.name == "Jill")).implicitly + def alexes = Ctx.run(query[Person].filter(p => p.name == "Alex")).implicitly } override def run = { val result = for { csession <- ZIO.scoped(zioSessionLayer.build) - joes <- MyQueryService(csession.get).joes + joes <- MyQueryService(csession.get).joes } yield joes result diff --git 
a/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/examples/other/PlainApp.scala b/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/examples/other/PlainApp.scala index 5cce7dd02..9e737ee01 100644 --- a/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/examples/other/PlainApp.scala +++ b/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/examples/other/PlainApp.scala @@ -19,7 +19,8 @@ object PlainApp { query[Person] } val czio = - MyZioPostgresContext.run(people) + MyZioPostgresContext + .run(people) .tap(result => printLine(result.toString)) .provide(zioSession) diff --git a/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/package.scala b/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/package.scala index 69accb888..9ce32712f 100644 --- a/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/package.scala +++ b/quill-cassandra-zio/src/test/scala/io/getquill/context/cassandra/zio/package.scala @@ -5,7 +5,6 @@ import io.getquill.cassandrazio.Quill import io.getquill.context.cassandra.zio.ZioCassandraSpec.runLayerUnsafe package object zio { - val pool = runLayerUnsafe(Quill.CassandraZioSession.fromPrefix("testStreamDB")) + val pool = runLayerUnsafe(Quill.CassandraZioSession.fromPrefix("testStreamDB")) lazy val testZioDB = new Quill.Cassandra(Literal, pool) with CassandraTestEntities } - diff --git a/quill-cassandra/src/main/scala/io/getquill/CassandraAsyncContext.scala b/quill-cassandra/src/main/scala/io/getquill/CassandraAsyncContext.scala index 9f95ce0a8..94d9f999e 100644 --- a/quill-cassandra/src/main/scala/io/getquill/CassandraAsyncContext.scala +++ b/quill-cassandra/src/main/scala/io/getquill/CassandraAsyncContext.scala @@ -1,26 +1,25 @@ package io.getquill -import com.datastax.oss.driver.api.core.{ CqlSession, CqlSessionBuilder } +import com.datastax.oss.driver.api.core.{CqlSession, CqlSessionBuilder} import com.typesafe.config.Config import io.getquill.context.ExecutionInfo import io.getquill.context.cassandra.util.FutureConversions._ //import io.getquill.monad.ScalaFutureIOMonad -import io.getquill.util.{ ContextLogger, LoadConfig } +import io.getquill.util.{ContextLogger, LoadConfig} import io.getquill.context.RunnerSummoningBehavior import scala.jdk.CollectionConverters._ import scala.compat.java8.FutureConverters._ -import scala.concurrent.{ ExecutionContext, Future } +import scala.concurrent.{ExecutionContext, Future} import scala.annotation.targetName class CassandraAsyncContext[+N <: NamingStrategy]( - naming: N, - session: CqlSession, + naming: N, + session: CqlSession, preparedStatementCacheSize: Long -) - extends CassandraCqlSessionContext[N](naming, session, preparedStatementCacheSize) - /*with ScalaFutureIOMonad*/ { +) extends CassandraCqlSessionContext[N](naming, session, preparedStatementCacheSize) + /*with ScalaFutureIOMonad*/ { // The ProtoQuill way of doing `implicit ec: ExceutionContext`. // This will cause the Context.scala `run` functions etc... summon an implicit @@ -29,7 +28,7 @@ class CassandraAsyncContext[+N <: NamingStrategy]( // parameters to functions (which the Scala2-Quill implementation relied on // to summon an ExecutionContext). 
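// For illustration, a minimal sketch of using this context (the `Person` entity
// and the "testAsyncDB" config prefix are assumptions, not part of these sources):
//   implicit val ec: ExecutionContext = ExecutionContext.global
//   val ctx = new CassandraAsyncContext(SnakeCase, LoadConfig("testAsyncDB"))
//   val people: Future[List[Person]] = ctx.run(query[Person])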
override type RunnerBehavior = RunnerSummoningBehavior.Implicit - override type Runner = ExecutionContext + override type Runner = ExecutionContext def this(naming: N, config: CassandraContextConfig) = this(naming, config.session, config.preparedStatementCacheSize) @@ -39,53 +38,64 @@ class CassandraAsyncContext[+N <: NamingStrategy]( private val logger = ContextLogger(classOf[CassandraAsyncContext[_]]) - override type Result[T] = Future[T] - override type RunQueryResult[T] = List[T] + override type Result[T] = Future[T] + override type RunQueryResult[T] = List[T] override type RunQuerySingleResult[T] = T - override type RunActionResult = Unit - override type RunBatchActionResult = Unit + override type RunActionResult = Unit + override type RunBatchActionResult = Unit // In ProtoQuill this is defined in CassandraRowContext and the Runner is ExecutionContext // override type Runner = Unit @targetName("runQueryDefault") inline def run[T](inline quoted: Quoted[Query[T]]): Future[List[T]] = InternalApi.runQueryDefault(quoted) @targetName("runQuery") - inline def run[T](inline quoted: Quoted[Query[T]], inline wrap: OuterSelectWrap): Future[List[T]] = InternalApi.runQuery(quoted, wrap) + inline def run[T](inline quoted: Quoted[Query[T]], inline wrap: OuterSelectWrap): Future[List[T]] = + InternalApi.runQuery(quoted, wrap) @targetName("runQuerySingle") inline def run[T](inline quoted: Quoted[T]): Future[T] = InternalApi.runQuerySingle(quoted) @targetName("runAction") inline def run[E](inline quoted: Quoted[Action[E]]): Future[Unit] = InternalApi.runAction(quoted) @targetName("runBatchAction") - inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): Future[Unit] = InternalApi.runBatchAction(quoted, 1) + inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): Future[Unit] = + InternalApi.runBatchAction(quoted, 1) // override def performIO[T](io: IO[T, _], transactional: Boolean = false)(implicit ec: ExecutionContext): Result[T] = { // if (transactional) logger.underlying.warn("Cassandra doesn't support transactions, ignoring `io.transactional`") // super.performIO(io) // } - def executeQuery[T](cql: String, prepare: Prepare, extractor: Extractor[T])(info: ExecutionInfo, dc: ExecutionContext): Result[RunQueryResult[T]] = { + def executeQuery[T](cql: String, prepare: Prepare, extractor: Extractor[T])( + info: ExecutionInfo, + dc: ExecutionContext + ): Result[RunQueryResult[T]] = { implicit val ec = dc - val statement = prepareAsyncAndGetStatement(cql, prepare, this, logger) + val statement = prepareAsyncAndGetStatement(cql, prepare, this, logger) statement.map(st => session.execute(st).asScala.toList.map(row => extractor(row, this))) } - def executeQuerySingle[T](cql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)(info: ExecutionInfo, dc: Runner): Result[RunQuerySingleResult[T]] = { + def executeQuerySingle[T]( + cql: String, + prepare: Prepare = identityPrepare, + extractor: Extractor[T] = identityExtractor + )(info: ExecutionInfo, dc: Runner): Result[RunQuerySingleResult[T]] = { implicit val ec = dc executeQuery(cql, prepare, extractor)(info, dc).map(handleSingleResult(cql, _)) } - def executeAction(cql: String, prepare: Prepare = identityPrepare)(executionInfo: ExecutionInfo, dc: Runner): Result[RunActionResult] = { + def executeAction( + cql: String, + prepare: Prepare = identityPrepare + )(executionInfo: ExecutionInfo, dc: Runner): Result[RunActionResult] = { implicit val ec = dc - 
val statement = prepareAsyncAndGetStatement(cql, prepare, this, logger) + val statement = prepareAsyncAndGetStatement(cql, prepare, this, logger) statement.flatMap(st => session.executeAsync(st).toCompletableFuture.toScala).map(_ => ()) } def executeBatchAction(groups: List[BatchGroup])(info: ExecutionInfo, dc: Runner): Result[RunBatchActionResult] = { implicit val ec = dc Future.sequence { - groups.flatMap { - case BatchGroup(cql, prepare) => - prepare.map(executeAction(cql, _)(info, dc)) + groups.flatMap { case BatchGroup(cql, prepare) => + prepare.map(executeAction(cql, _)(info, dc)) } }.map(_ => ()) } diff --git a/quill-cassandra/src/main/scala/io/getquill/CassandraContextConfig.scala b/quill-cassandra/src/main/scala/io/getquill/CassandraContextConfig.scala index 139bbc9a2..d77250e91 100644 --- a/quill-cassandra/src/main/scala/io/getquill/CassandraContextConfig.scala +++ b/quill-cassandra/src/main/scala/io/getquill/CassandraContextConfig.scala @@ -1,6 +1,6 @@ package io.getquill -import com.datastax.oss.driver.api.core.{ CqlSession, CqlSessionBuilder } +import com.datastax.oss.driver.api.core.{CqlSession, CqlSessionBuilder} import com.typesafe.config.Config import io.getquill.context.cassandra.cluster.SessionBuilder @@ -11,10 +11,11 @@ case class CassandraContextConfig(config: Config) { else 1000 def builder: CqlSessionBuilder = SessionBuilder(config.getConfig("session")) - lazy val session: CqlSession = builder.withKeyspace(keyspace).build() + lazy val session: CqlSession = builder.withKeyspace(keyspace).build() /** - * the keyspace is from config file. to get actual active keyspace use session.getKeyspace + * the keyspace is from config file. to get actual active keyspace use + * session.getKeyspace * @return */ def keyspace: String = config.getString("keyspace") diff --git a/quill-cassandra/src/main/scala/io/getquill/CassandraCqlSessionContext.scala b/quill-cassandra/src/main/scala/io/getquill/CassandraCqlSessionContext.scala index 96a13b256..85cdde37f 100644 --- a/quill-cassandra/src/main/scala/io/getquill/CassandraCqlSessionContext.scala +++ b/quill-cassandra/src/main/scala/io/getquill/CassandraCqlSessionContext.scala @@ -1,16 +1,16 @@ package io.getquill import com.datastax.oss.driver.api.core.CqlSession -import io.getquill.context.{ AsyncFutureCache, CassandraSession, SyncCache } +import io.getquill.context.{AsyncFutureCache, CassandraSession, SyncCache} import io.getquill.context.cassandra.CassandraSessionContext abstract class CassandraCqlSessionContext[+N <: NamingStrategy]( - val naming: N, - val session: CqlSession, + val naming: N, + val session: CqlSession, val preparedStatementCacheSize: Long -) - extends CassandraSessionContext[N] with CassandraSession - with SyncCache - with AsyncFutureCache { - override def close(): Unit = () - } \ No newline at end of file +) extends CassandraSessionContext[N] + with CassandraSession + with SyncCache + with AsyncFutureCache { + override def close(): Unit = () +} diff --git a/quill-cassandra/src/main/scala/io/getquill/CassandraMirrorContext.scala b/quill-cassandra/src/main/scala/io/getquill/CassandraMirrorContext.scala index 5aa9aca1a..49fde8e3f 100644 --- a/quill-cassandra/src/main/scala/io/getquill/CassandraMirrorContext.scala +++ b/quill-cassandra/src/main/scala/io/getquill/CassandraMirrorContext.scala @@ -1,37 +1,47 @@ package io.getquill import java.util.Date -import io.getquill.context.cassandra.encoding.{ CassandraMapper, CassandraType, MapperSide } -import io.getquill.context.cassandra.{ CassandraContext, CqlIdiom } +import 
io.getquill.context.cassandra.encoding.{CassandraMapper, CassandraType, MapperSide} +import io.getquill.context.cassandra.{CassandraContext, CqlIdiom} -import java.time.{ Instant, LocalDate } +import java.time.{Instant, LocalDate} import scala.reflect.ClassTag class CassandraMirrorContextWithQueryProbing extends CassandraMirrorContext(Literal) with QueryProbing class CassandraMirrorContext[+Naming <: NamingStrategy](naming: Naming) - extends MirrorContext[CqlIdiom, Naming](CqlIdiom, naming) - with CassandraContext[Naming] { + extends MirrorContext[CqlIdiom, Naming](CqlIdiom, naming) + with CassandraContext[Naming] { - implicit val timestampDecoder: Decoder[Instant] = decoder[Instant] - implicit val timestampEncoder: Encoder[Instant] = encoder[Instant] + implicit val timestampDecoder: Decoder[Instant] = decoder[Instant] + implicit val timestampEncoder: Encoder[Instant] = encoder[Instant] implicit val cassandraLocalDateDecoder: Decoder[LocalDate] = decoder[LocalDate] implicit val cassandraLocalDateEncoder: Encoder[LocalDate] = encoder[LocalDate] - implicit def listDecoder[T, Cas](implicit mapper: CassandraMapper[Cas, T, MapperSide.Decode], ct: ClassTag[Cas]): Decoder[List[T]] = decoderUnsafe[List[T]] - implicit def setDecoder[T, Cas](implicit mapper: CassandraMapper[Cas, T, MapperSide.Decode], ct: ClassTag[Cas]): Decoder[Set[T]] = decoderUnsafe[Set[T]] - implicit def mapDecoder[K, V, KCas, VCas]( - implicit + implicit def listDecoder[T, Cas](implicit + mapper: CassandraMapper[Cas, T, MapperSide.Decode], + ct: ClassTag[Cas] + ): Decoder[List[T]] = decoderUnsafe[List[T]] + implicit def setDecoder[T, Cas](implicit + mapper: CassandraMapper[Cas, T, MapperSide.Decode], + ct: ClassTag[Cas] + ): Decoder[Set[T]] = decoderUnsafe[Set[T]] + implicit def mapDecoder[K, V, KCas, VCas](implicit keyMapper: CassandraMapper[KCas, K, MapperSide.Decode], valMapper: CassandraMapper[VCas, V, MapperSide.Decode], a: ClassTag[KCas], b: ClassTag[VCas] ): Decoder[Map[K, V]] = decoderUnsafe[Map[K, V]] - implicit def listEncoder[T, Cas](implicit mapper: CassandraMapper[T, Cas, MapperSide.Encode], ct: ClassTag[Cas]): Encoder[List[T]] = encoder[List[T]] - implicit def setEncoder[T, Cas](implicit mapper: CassandraMapper[T, Cas, MapperSide.Encode], ct: ClassTag[Cas]): Encoder[Set[T]] = encoder[Set[T]] - implicit def mapEncoder[K, V, KCas, VCas]( - implicit + implicit def listEncoder[T, Cas](implicit + mapper: CassandraMapper[T, Cas, MapperSide.Encode], + ct: ClassTag[Cas] + ): Encoder[List[T]] = encoder[List[T]] + implicit def setEncoder[T, Cas](implicit + mapper: CassandraMapper[T, Cas, MapperSide.Encode], + ct: ClassTag[Cas] + ): Encoder[Set[T]] = encoder[Set[T]] + implicit def mapEncoder[K, V, KCas, VCas](implicit keyMapper: CassandraMapper[K, KCas, MapperSide.Encode], valMapper: CassandraMapper[V, VCas, MapperSide.Encode], a: ClassTag[KCas], @@ -39,6 +49,6 @@ class CassandraMirrorContext[+Naming <: NamingStrategy](naming: Naming) ): Encoder[Map[K, V]] = encoder[Map[K, V]] implicit def udtCassandraType[T <: Udt]: CassandraType[T] = CassandraType.of[T] - implicit def udtDecoder[T <: Udt: ClassTag]: Decoder[T] = decoder[T] - implicit def udtEncoder[T <: Udt]: Encoder[T] = encoder[T] -} \ No newline at end of file + implicit def udtDecoder[T <: Udt: ClassTag]: Decoder[T] = decoder[T] + implicit def udtEncoder[T <: Udt]: Encoder[T] = encoder[T] +} diff --git a/quill-cassandra/src/main/scala/io/getquill/CassandraSyncContext.scala b/quill-cassandra/src/main/scala/io/getquill/CassandraSyncContext.scala index 
f9e3973cf..ef713b22c 100644 --- a/quill-cassandra/src/main/scala/io/getquill/CassandraSyncContext.scala +++ b/quill-cassandra/src/main/scala/io/getquill/CassandraSyncContext.scala @@ -4,18 +4,17 @@ import com.datastax.oss.driver.api.core.CqlSession import com.typesafe.config.Config import io.getquill.context.ExecutionInfo //import io.getquill.monad.SyncIOMonad -import io.getquill.util.{ ContextLogger, LoadConfig } +import io.getquill.util.{ContextLogger, LoadConfig} import scala.jdk.CollectionConverters._ import scala.annotation.targetName class CassandraSyncContext[+N <: NamingStrategy]( - naming: N, - session: CqlSession, + naming: N, + session: CqlSession, preparedStatementCacheSize: Long -) - extends CassandraCqlSessionContext[N](naming, session, preparedStatementCacheSize) - /*with SyncIOMonad*/ { +) extends CassandraCqlSessionContext[N](naming, session, preparedStatementCacheSize) + /*with SyncIOMonad*/ { def this(naming: N, config: CassandraContextConfig) = this(naming, config.session, config.preparedStatementCacheSize) def this(naming: N, config: Config) = this(naming, CassandraContextConfig(config)) @@ -23,23 +22,25 @@ class CassandraSyncContext[+N <: NamingStrategy]( private val logger = ContextLogger(classOf[CassandraSyncContext[_]]) - override type Result[T] = T - override type RunQueryResult[T] = List[T] + override type Result[T] = T + override type RunQueryResult[T] = List[T] override type RunQuerySingleResult[T] = T - override type RunActionResult = Unit - override type RunBatchActionResult = Unit - override type Runner = Unit + override type RunActionResult = Unit + override type RunBatchActionResult = Unit + override type Runner = Unit @targetName("runQueryDefault") inline def run[T](inline quoted: Quoted[Query[T]]): List[T] = InternalApi.runQueryDefault(quoted) @targetName("runQuery") - inline def run[T](inline quoted: Quoted[Query[T]], inline wrap: OuterSelectWrap): List[T] = InternalApi.runQuery(quoted, wrap) + inline def run[T](inline quoted: Quoted[Query[T]], inline wrap: OuterSelectWrap): List[T] = + InternalApi.runQuery(quoted, wrap) @targetName("runQuerySingle") inline def run[T](inline quoted: Quoted[T]): T = InternalApi.runQuerySingle(quoted) @targetName("runAction") inline def run[E](inline quoted: Quoted[Action[E]]): Unit = InternalApi.runAction(quoted) @targetName("runBatchAction") - inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): Unit = InternalApi.runBatchAction(quoted, 1) + inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): Unit = + InternalApi.runBatchAction(quoted, 1) override protected def context: Runner = () @@ -48,14 +49,20 @@ class CassandraSyncContext[+N <: NamingStrategy]( // super.performIO(io) // } - def executeQuery[T](cql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)(info: ExecutionInfo, dc: Runner): List[T] = { + def executeQuery[T](cql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)( + info: ExecutionInfo, + dc: Runner + ): List[T] = { val (params, bs) = prepare(this.prepare(cql), this) logger.logQuery(cql, params) - session.execute(bs) - .all.asScala.toList.map(row => extractor(row, this)) + session.execute(bs).all.asScala.toList.map(row => extractor(row, this)) } - def executeQuerySingle[T](cql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)(info: ExecutionInfo, dc: Runner): T = + def executeQuerySingle[T]( + cql: String, + 
prepare: Prepare = identityPrepare, + extractor: Extractor[T] = identityExtractor + )(info: ExecutionInfo, dc: Runner): T = handleSingleResult(cql, executeQuery(cql, prepare, extractor)(info, dc)) def executeAction(cql: String, prepare: Prepare = identityPrepare)(info: ExecutionInfo, dc: Runner): Unit = { @@ -66,8 +73,7 @@ class CassandraSyncContext[+N <: NamingStrategy]( } def executeBatchAction(groups: List[BatchGroup])(info: ExecutionInfo, dc: Runner): Unit = - groups.foreach { - case BatchGroup(cql, prepare) => - prepare.foreach(executeAction(cql, _)(info, dc)) + groups.foreach { case BatchGroup(cql, prepare) => + prepare.foreach(executeAction(cql, _)(info, dc)) } } diff --git a/quill-cassandra/src/main/scala/io/getquill/Ops.scala b/quill-cassandra/src/main/scala/io/getquill/Ops.scala index 584d8b28f..16638531b 100644 --- a/quill-cassandra/src/main/scala/io/getquill/Ops.scala +++ b/quill-cassandra/src/main/scala/io/getquill/Ops.scala @@ -1,39 +1,38 @@ package io.getquill /** - * These are helper methods for doing various common tasks in cassandra. For example, - * if you are filtering by non-indexed methods, use the query.allowFiltering method - * to turn off the cassandra check. Note that this will likely cause a full-table scan. + * These are helper methods for doing various common tasks in cassandra. For + * example, if you are filtering by non-indexed columns, use the + * query.allowFiltering method to turn off the cassandra check. Note that this + * will likely cause a full-table scan. * - * Normally in Quill these are quoted however in ProtoQuill they are not always unquoted e.g. - * when something like this happens: - * def peopleByName = quote { - * (name: String) => people.filter(p => p.name == name).allowFiltering - * } - * since the type Quoted[Q] for allowFiltering is not auto-unquoted, error will happnen. - * This is why these are left as unquoted inline def. + * Normally in Quill these are quoted, however in ProtoQuill they are not always
+ * auto-unquoted, e.g. when something like this happens:
+ *   def peopleByName = quote {
+ *     (name: String) => people.filter(p => p.name == name).allowFiltering
+ *   }
+ * Since the type Quoted[Q] for allowFiltering is not auto-unquoted, an error
+ * will happen. This is why these are left as unquoted inline defs.
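+ *
+ * For illustration, a sketch of how the insert extensions below compose (the
+ * Person(name: String, age: Int) entity is an assumption, as in the example above):
+ *   inline def insertJoe = quote {
+ *     query[Person].insertValue(lift(Person("Joe", 123))).ifNotExists.usingTtl(3600)
+ *   }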
*/ extension [Q <: Query[_]](inline q: Q) { inline def allowFiltering = sql"$q ALLOW FILTERING".generic.pure.as[Q] } extension [A <: EntityQuery[_]](inline q: A) { - inline def usingTimestamp(inline ts: Int) = sql"$q USING TIMESTAMP $ts".as[A] - inline def usingTtl(inline ttl: Int) = sql"$q USING TTL $ttl".as[A] + inline def usingTimestamp(inline ts: Int) = sql"$q USING TIMESTAMP $ts".as[A] + inline def usingTtl(inline ttl: Int) = sql"$q USING TTL $ttl".as[A] inline def using(inline ts: Int, inline ttl: Int) = sql"$q USING TIMESTAMP $ts AND TTL $ttl".as[A] } extension [A <: Insert[_]](inline q: A) { - inline def ifNotExists = sql"$q IF NOT EXISTS".as[A] - inline def usingTimestamp(inline ts: Int) = sql"$q USING TIMESTAMP $ts".as[A] - inline def usingTtl(inline ttl: Int) = sql"$q USING TTL $ttl".as[A] + inline def ifNotExists = sql"$q IF NOT EXISTS".as[A] + inline def usingTimestamp(inline ts: Int) = sql"$q USING TIMESTAMP $ts".as[A] + inline def usingTtl(inline ttl: Int) = sql"$q USING TTL $ttl".as[A] inline def using(inline ts: Int, inline ttl: Int) = sql"$q USING TIMESTAMP $ts AND TTL $ttl".as[A] } extension [A <: Delete[_]](inline q: A) { - inline def ifExists = sql"$q IF EXISTS".as[A] - inline def usingTimestamp(inline ts: Int) = sql"$q USING TIMESTAMP $ts".as[A] - inline def usingTtl(inline ttl: Int) = sql"$q USING TTL $ttl".as[A] + inline def ifExists = sql"$q IF EXISTS".as[A] + inline def usingTimestamp(inline ts: Int) = sql"$q USING TIMESTAMP $ts".as[A] + inline def usingTtl(inline ttl: Int) = sql"$q USING TTL $ttl".as[A] inline def using(inline ts: Int, inline ttl: Int) = sql"$q USING TIMESTAMP $ts AND TTL $ttl".as[A] } diff --git a/quill-cassandra/src/main/scala/io/getquill/UdtMetaDsl.scala b/quill-cassandra/src/main/scala/io/getquill/UdtMetaDsl.scala index 5ad3e6bcd..092716048 100644 --- a/quill-cassandra/src/main/scala/io/getquill/UdtMetaDsl.scala +++ b/quill-cassandra/src/main/scala/io/getquill/UdtMetaDsl.scala @@ -8,11 +8,16 @@ import scala.language.experimental.macros /** * Creates udt meta to override udt name / keyspace and rename columns * - * @param path - either `udt_name` or `keyspace.udt_name` - * @param columns - columns to rename - * @return udt meta + * @param path + * \- either `udt_name` or `keyspace.udt_name` + * @param columns + * \- columns to rename + * @return + * udt meta */ -inline def udtMeta[T <: Udt](inline path: String, inline columns: (T => (Any, String))*): UdtMeta[T] = ${UdtMetaDslMacro[T]('path, 'columns)} +inline def udtMeta[T <: Udt](inline path: String, inline columns: (T => (Any, String))*): UdtMeta[T] = ${ + UdtMetaDslMacro[T]('path, 'columns) +} trait UdtMeta[T <: Udt] { def keyspace: Option[String] @@ -26,7 +31,7 @@ object UdtMeta { import quotes.reflect.* if (TypeRepr.of[T] =:= TypeRepr.of[Udt]) // TODO quill.trace.types 'summoning' level should enable this - //println("Cannot derive schema for the base Udt (print the stack trace too)") + // println("Cannot derive schema for the base Udt (print the stack trace too)") '{ ??? } else Expr.summon[UdtMeta[T]] match { @@ -36,8 +41,8 @@ object UdtMeta { case None => val typeName = TypeRepr.of[T].widen.typeSymbol.name // TODO quill.trace.types 'summoning' level should enable this - //println(s"Dsl not found. Making one with the type name: ${typeName}") + // println(s"Dsl not found. 
Making one with the type name: ${typeName}") UdtMetaDslMacro[T](Expr(typeName), Expr.ofList(Seq())) } } -} // end UdtMeta \ No newline at end of file +} // end UdtMeta diff --git a/quill-cassandra/src/main/scala/io/getquill/context/Caches.scala b/quill-cassandra/src/main/scala/io/getquill/context/Caches.scala index ac3868165..73e237ed4 100644 --- a/quill-cassandra/src/main/scala/io/getquill/context/Caches.scala +++ b/quill-cassandra/src/main/scala/io/getquill/context/Caches.scala @@ -1,11 +1,11 @@ package io.getquill.context -import com.datastax.oss.driver.api.core.cql.{ BoundStatement, PreparedStatement } +import com.datastax.oss.driver.api.core.cql.{BoundStatement, PreparedStatement} import io.getquill.context.cassandra.PrepareStatementCache import io.getquill.context.cassandra.util.FutureConversions._ import java.util.concurrent.CompletionStage -import scala.concurrent.{ ExecutionContext, Future } +import scala.concurrent.{ExecutionContext, Future} import scala.util.Failure import scala.compat.java8.FutureConverters._ diff --git a/quill-cassandra/src/main/scala/io/getquill/context/CassandraSession.scala b/quill-cassandra/src/main/scala/io/getquill/context/CassandraSession.scala index 1d03f1e6c..2bd4e9f90 100644 --- a/quill-cassandra/src/main/scala/io/getquill/context/CassandraSession.scala +++ b/quill-cassandra/src/main/scala/io/getquill/context/CassandraSession.scala @@ -23,19 +23,28 @@ trait CassandraSession extends UdtValueLookup { case udt :: Nil => udt.newValue() case Nil => fail(s"Could not find UDT `$udtName` in any keyspace") - case udts => udts - .find(udt => keyspace.contains(udt.getKeyspace.toString) || udt.getKeyspace.toString == session.getKeyspace.get().toString) - .map(_.newValue()) - .getOrElse(fail(s"Could not determine to which keyspace `$udtName` UDT belongs. " + - s"Please specify desired keyspace using UdtMeta")) + case udts => + udts + .find(udt => + keyspace + .contains(udt.getKeyspace.toString) || udt.getKeyspace.toString == session.getKeyspace.get().toString + ) + .map(_.newValue()) + .getOrElse( + fail( + s"Could not determine to which keyspace `$udtName` UDT belongs. 
" + + s"Please specify desired keyspace using UdtMeta" + ) + ) } - def close(): Unit = { + def close(): Unit = session.close() - } } trait UdtValueLookup { - def udtValueOf(udtName: String, keyspace: Option[String] = None): UdtValue = throw new IllegalStateException("UDTs are not supported by this context") + def udtValueOf(udtName: String, keyspace: Option[String] = None): UdtValue = throw new IllegalStateException( + "UDTs are not supported by this context" + ) def session: CqlSession } diff --git a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/CassandraContext.scala b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/CassandraContext.scala index 3e93f65a2..fb1998239 100644 --- a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/CassandraContext.scala +++ b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/CassandraContext.scala @@ -1,19 +1,19 @@ package io.getquill.context.cassandra -import java.util.{ Date, UUID } +import java.util.{Date, UUID} import io.getquill.NamingStrategy import io.getquill.context.Context -import io.getquill.context.cassandra.encoding.{ CassandraMapper, Encodings, MapperSide } +import io.getquill.context.cassandra.encoding.{CassandraMapper, Encodings, MapperSide} -import java.time.{ Instant, LocalDate } +import java.time.{Instant, LocalDate} import scala.reflect.ClassTag trait CassandraContext[+N <: NamingStrategy] - extends Context[CqlIdiom, N] - with Encodings - //with UdtMetaDsl - // with Ops - { + extends Context[CqlIdiom, N] + with Encodings + // with UdtMetaDsl + // with Ops + { implicit def optionDecoder[T](implicit d: Decoder[T]): Decoder[Option[T]] implicit def optionEncoder[T](implicit d: Encoder[T]): Encoder[Option[T]] @@ -46,23 +46,33 @@ trait CassandraContext[+N <: NamingStrategy] implicit val timestampEncoder: Encoder[Instant] implicit val cassandraLocalDateEncoder: Encoder[LocalDate] - implicit def listDecoder[T, Cas](implicit mapper: CassandraMapper[Cas, T, MapperSide.Decode], ct: ClassTag[Cas]): Decoder[List[T]] - implicit def setDecoder[T, Cas](implicit mapper: CassandraMapper[Cas, T, MapperSide.Decode], ct: ClassTag[Cas]): Decoder[Set[T]] - implicit def mapDecoder[K, V, KCas, VCas]( - implicit + implicit def listDecoder[T, Cas](implicit + mapper: CassandraMapper[Cas, T, MapperSide.Decode], + ct: ClassTag[Cas] + ): Decoder[List[T]] + implicit def setDecoder[T, Cas](implicit + mapper: CassandraMapper[Cas, T, MapperSide.Decode], + ct: ClassTag[Cas] + ): Decoder[Set[T]] + implicit def mapDecoder[K, V, KCas, VCas](implicit keyMapper: CassandraMapper[KCas, K, MapperSide.Decode], valMapper: CassandraMapper[VCas, V, MapperSide.Decode], a: ClassTag[KCas], b: ClassTag[VCas] ): Decoder[Map[K, V]] - implicit def listEncoder[T, Cas](implicit mapper: CassandraMapper[T, Cas, MapperSide.Encode], ct: ClassTag[Cas]): Encoder[List[T]] - implicit def setEncoder[T, Cas](implicit mapper: CassandraMapper[T, Cas, MapperSide.Encode], ct: ClassTag[Cas]): Encoder[Set[T]] - implicit def mapEncoder[K, V, KCas, VCas]( - implicit + implicit def listEncoder[T, Cas](implicit + mapper: CassandraMapper[T, Cas, MapperSide.Encode], + ct: ClassTag[Cas] + ): Encoder[List[T]] + implicit def setEncoder[T, Cas](implicit + mapper: CassandraMapper[T, Cas, MapperSide.Encode], + ct: ClassTag[Cas] + ): Encoder[Set[T]] + implicit def mapEncoder[K, V, KCas, VCas](implicit keyMapper: CassandraMapper[K, KCas, MapperSide.Encode], valMapper: CassandraMapper[V, VCas, MapperSide.Encode], a: ClassTag[KCas], b: ClassTag[VCas] ): Encoder[Map[K, V]] -} \ 
No newline at end of file +} diff --git a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/CassandraSessionContext.scala b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/CassandraSessionContext.scala index 4694d0de4..f60bd17e3 100644 --- a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/CassandraSessionContext.scala +++ b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/CassandraSessionContext.scala @@ -1,50 +1,47 @@ package io.getquill.context.cassandra import io.getquill.ReturnAction -import com.datastax.oss.driver.api.core.cql.{ BoundStatement, Row } +import com.datastax.oss.driver.api.core.cql.{BoundStatement, Row} import io.getquill.NamingStrategy -import io.getquill.context.{ CassandraSession, ExecutionInfo, Context, UdtValueLookup } +import io.getquill.context.{CassandraSession, ExecutionInfo, Context, UdtValueLookup} -import io.getquill.context.cassandra.encoding.{ CassandraTypes, Decoders, Encoders } //UdtEncoding +import io.getquill.context.cassandra.encoding.{CassandraTypes, Decoders, Encoders} //UdtEncoding import io.getquill.util.ContextLogger import io.getquill.util.Messages.fail import io.getquill.context.RowContext -import scala.concurrent.{ Await, ExecutionContext, Future } +import scala.concurrent.{Await, ExecutionContext, Future} import scala.concurrent.duration._ import scala.util.Try import io.getquill.context.ProtoContextSecundus import io.getquill.generic.GenericNullChecker abstract class CassandraSessionContext[+N <: NamingStrategy] - extends CassandraPrepareContext[N] - with CassandraBaseContext[N] + extends CassandraPrepareContext[N] + with CassandraBaseContext[N] /** - * When using this context, we cannot encode UDTs since does not have a proper CassandraSession trait mixed in with udtValueOf. - * Certain contexts e.g. the CassandraLagomContext does not currently have this ability. + * When using this context, we cannot encode UDTs since it does not have a proper + * CassandraSession trait mixed in with udtValueOf. Certain contexts, e.g. the + * CassandraLagomContext, do not currently have this ability.
*/ -abstract class CassandraSessionlessContext[+N <: NamingStrategy] - extends CassandraPrepareContext[N] +abstract class CassandraSessionlessContext[+N <: NamingStrategy] extends CassandraPrepareContext[N] - -trait CassandraPrepareContext[+N <: NamingStrategy] -extends CassandraStandardContext[N] -with CassandraContext[N] { +trait CassandraPrepareContext[+N <: NamingStrategy] extends CassandraStandardContext[N] with CassandraContext[N] { protected def prepareAsync(cql: String)(implicit executionContext: ExecutionContext): Future[BoundStatement] - def probe(cql: String): Try[_] = { + def probe(cql: String): Try[_] = Try { Await.result(prepareAsync(cql)(ExecutionContext.Implicits.global), 1.minute) () } - } - protected def prepareAsyncAndGetStatement(cql: String, prepare: Prepare, session: Session, logger: ContextLogger)(implicit executionContext: ExecutionContext): Future[BoundStatement] = { + protected def prepareAsyncAndGetStatement(cql: String, prepare: Prepare, session: Session, logger: ContextLogger)( + implicit executionContext: ExecutionContext + ): Future[BoundStatement] = { val prepareResult = this.prepareAsync(cql).map(row => prepare(row, session)) - val preparedRow = prepareResult.map { - case (params, bs) => - logger.logQuery(cql, params) - bs + val preparedRow = prepareResult.map { case (params, bs) => + logger.logQuery(cql, params) + bs } preparedRow } @@ -55,25 +52,38 @@ trait CassandraBaseContext[+N <: NamingStrategy] extends CassandraStandardContex } trait CassandraStandardContext[+N <: NamingStrategy] - extends CassandraRowContext - with CassandraContext[N] - with Context[CqlIdiom, N] - with Encoders - with Decoders - with CassandraTypes { + extends CassandraRowContext + with CassandraContext[N] + with Context[CqlIdiom, N] + with Encoders + with Decoders + with CassandraTypes { /*with UdtEncoding*/ // Overriding them as defined in ProtoContextSecundus - override type RunActionReturningResult[T] = Unit + override type RunActionReturningResult[T] = Unit override type RunBatchActionReturningResult[T] = Unit - override def executeActionReturning[O](sql: String, prepare: Prepare = identityPrepare, extractor: Extractor[O], returningBehavior: ReturnAction)(info: ExecutionInfo, dc: Runner) = + override def executeActionReturning[O]( + sql: String, + prepare: Prepare = identityPrepare, + extractor: Extractor[O], + returningBehavior: ReturnAction + )(info: ExecutionInfo, dc: Runner) = fail("Cassandra doesn't support `returning`.") - override def executeActionReturningMany[O](sql: String, prepare: Prepare = identityPrepare, extractor: Extractor[O], returningBehavior: ReturnAction)(info: ExecutionInfo, dc: Runner) = + override def executeActionReturningMany[O]( + sql: String, + prepare: Prepare = identityPrepare, + extractor: Extractor[O], + returningBehavior: ReturnAction + )(info: ExecutionInfo, dc: Runner) = fail("Cassandra doesn't support `returningMany`.") - override def executeBatchActionReturning[T](groups: List[BatchGroupReturning], extractor: Extractor[T])(info: ExecutionInfo, dc: Runner) = + override def executeBatchActionReturning[T](groups: List[BatchGroupReturning], extractor: Extractor[T])( + info: ExecutionInfo, + dc: Runner + ) = fail("Cassandra doesn't support `returning`.") } @@ -82,10 +92,10 @@ trait CassandraRowContext extends RowContext { val idiom = CqlIdiom override type PrepareRow = BoundStatement - override type ResultRow = Row + override type ResultRow = Row type BaseNullChecker = GenericNullChecker[ResultRow, Session] - type NullChecker = 
CassandraNullChecker + type NullChecker = CassandraNullChecker class CassandraNullChecker extends BaseNullChecker { override def apply(index: Int, row: Row): Boolean = row.isNull(index) } } diff --git a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/PrepareStatementCache.scala b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/PrepareStatementCache.scala index 40da55144..14c82c548 100644 --- a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/PrepareStatementCache.scala +++ b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/PrepareStatementCache.scala @@ -9,8 +9,7 @@ import java.util.concurrent.Callable class PrepareStatementCache[V <: AnyRef](size: Long) { private val cache = - CacheBuilder - .newBuilder + CacheBuilder.newBuilder .maximumSize(size) .build[java.lang.Long, V]() @@ -26,10 +25,9 @@ class PrepareStatementCache[V <: AnyRef](size: Long) { def invalidate(stmt: String): Unit = cache.invalidate(hash(stmt)) - private def hash(string: String): java.lang.Long = { + private def hash(string: String): java.lang.Long = hasher .hashString(string, Charsets.UTF_8) .asLong() - } } diff --git a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/UdtMetaDslMacro.scala b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/UdtMetaDslMacro.scala index 4d3edb323..8c5c3e574 100644 --- a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/UdtMetaDslMacro.scala +++ b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/UdtMetaDslMacro.scala @@ -9,20 +9,23 @@ import io.getquill.{Udt, UdtMeta} object UdtMetaDslMacro { /** - * TODO Eventually we might want to use a Naming schema `default` method to look up UDT object names. - * Maybe should have summonable implicit Naming instance here? (probably would only need the Expr[Naming] - * since we don't need to directly call it on columns hence wouldn't need to deal with compile-time/runtime - * dicotomies) + * TODO Eventually we might want to use a Naming schema `default` method to + * look up UDT object names. Maybe should have summonable implicit Naming + * instance here? (probably would only need the Expr[Naming] since we don't + * need to directly call it on columns hence wouldn't need to deal with + * compile-time/runtime dichotomies) */ - def apply[T <: Udt: Type](path: Expr[String], columns: Expr[Seq[T => (Any, String)]])(using Quotes): Expr[UdtMeta[T]] = { + def apply[T <: Udt: Type](path: Expr[String], columns: Expr[Seq[T => (Any, String)]])(using + Quotes + ): Expr[UdtMeta[T]] = { import quotes.reflect._ val columnsList = columns match { - case '{ Nil } => Nil - case '{ List() } => Nil + case '{ Nil } => Nil + case '{ List() } => Nil case Varargs(cols) => cols - case _ => report.throwError(s"Invalid UdtMeta columns list: ${Format.Expr(columns)}", columns) + case _ => report.throwError(s"Invalid UdtMeta columns list: ${Format.Expr(columns)}", columns) } // Do we need to do asTerm.underlyingArgument.asExpr to the terms here?
As far as I understand, @@ -31,16 +34,16 @@ object UdtMetaDslMacro { val pairs = columnsList.map { case Lambda1(ident, identTpe, ArrowFunction(prop, ConstExpr(value: String))) => val propName: String = prop.asTerm.symbol.name - '{ (${Expr(propName: String)}, ${Expr(value: String)}) } + '{ (${ Expr(propName: String) }, ${ Expr(value: String) }) } case other => report.throwError(s"Invalid UdtMeta assignment statement: ${Format.Expr(other)}", other) } '{ new UdtMeta[T] { - val (nm, ks) = UdtMetaUtils.parse(${path}) // TODO Insert `naming.default` here in future - val map = Map[String, String](${Varargs(pairs)}: _*) - def name = nm - def keyspace = ks + val (nm, ks) = UdtMetaUtils.parse(${ path }) // TODO Insert `naming.default` here in future + val map = Map[String, String](${ Varargs(pairs) }: _*) + def name = nm + def keyspace = ks def alias(col: String) = map.get(col) } } diff --git a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/cluster/SessionBuilder.scala b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/cluster/SessionBuilder.scala index 47429ee0a..c9c1957d8 100644 --- a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/cluster/SessionBuilder.scala +++ b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/cluster/SessionBuilder.scala @@ -3,7 +3,7 @@ package io.getquill.context.cassandra.cluster import com.datastax.oss.driver.api.core.config.DriverConfigLoader import io.getquill.util.Messages._ import com.typesafe.config.Config -import com.datastax.oss.driver.api.core.{ CqlSession, CqlSessionBuilder } +import com.datastax.oss.driver.api.core.{CqlSession, CqlSessionBuilder} import com.datastax.oss.driver.internal.core.config.typesafe.DefaultDriverConfigLoader import java.util.function.Supplier @@ -13,27 +13,23 @@ object SessionBuilder { /** * Reference configuration: * https://docs.datastax.com/en/developer/java-driver/4.13/manual/core/configuration/reference/ - * config values should be under datastax-java-driver - * sample HOCON config section: + * config values should be under datastax-java-driver sample HOCON config + * section: * - *
{@code  {
-   *    basic.contact-points = [ "127.0.0.1:9042" ]
-   *    basic.load-balancing-policy.local-datacenter = dc1
-   *    basic.config-reload-interval = 7 minutes
-   *    basic.request.consistency = LOCAL_QUORUM
-   *    advanced.connection.connect-timeout = 21 seconds
-   *  }
-   * }
+   * <pre>{@code  {
+   *    basic.contact-points = [ "127.0.0.1:9042" ]
+   *    basic.load-balancing-policy.local-datacenter = dc1
+   *    basic.config-reload-interval = 7 minutes
+   *    basic.request.consistency = LOCAL_QUORUM
+   *    advanced.connection.connect-timeout = 21 seconds
+   *  }
+   * }</pre>
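+   *
+   * For illustration, a minimal usage sketch (the config prefix and keyspace
+   * here are assumptions, mirroring the test configs used elsewhere):
+   * {@code
+   * val cfg = ConfigFactory.load().getConfig("testStreamDB.session")
+   * val session = SessionBuilder(cfg).withKeyspace("quill_test").build()
+   * }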
* @param cfg * @return */ - def apply(cfg: Config): CqlSessionBuilder = { - CqlSession.builder() + def apply(cfg: Config): CqlSessionBuilder = + CqlSession + .builder() .withConfigLoader( new DefaultDriverConfigLoader(new Supplier[Config] { override def get(): Config = cfg.withFallback(DefaultDriverConfigLoader.DEFAULT_CONFIG_SUPPLIER.get()) }) ) - } } diff --git a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/CassandraMapper.scala b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/CassandraMapper.scala index ca35f3f84..2a3442e66 100644 --- a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/CassandraMapper.scala +++ b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/CassandraMapper.scala @@ -9,8 +9,10 @@ import io.getquill.context.UdtValueLookup */ case class CassandraMapper[I, O, Side <: MapperSide](f: (I, UdtValueLookup) => O) object CassandraMapper { - def encode[I, O](f: I => O): CassandraMapper[I, O, MapperSide.Encode] = CassandraMapper[I, O, MapperSide.Encode]((iOrig, _) => f(iOrig)) - def decode[I, O](f: I => O): CassandraMapper[I, O, MapperSide.Decode] = CassandraMapper[I, O, MapperSide.Decode]((iOrig, _) => f(iOrig)) + def encode[I, O](f: I => O): CassandraMapper[I, O, MapperSide.Encode] = + CassandraMapper[I, O, MapperSide.Encode]((iOrig, _) => f(iOrig)) + def decode[I, O](f: I => O): CassandraMapper[I, O, MapperSide.Decode] = + CassandraMapper[I, O, MapperSide.Decode]((iOrig, _) => f(iOrig)) } sealed trait MapperSide diff --git a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/CassandraMapperConversions.scala b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/CassandraMapperConversions.scala index 1f3242099..429e1cd3f 100644 --- a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/CassandraMapperConversions.scala +++ b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/CassandraMapperConversions.scala @@ -4,36 +4,36 @@ import io.getquill.MappedEncoding trait CassandraMapperConversions extends CassandraMapperConversionsLowPriorityImplicits { - implicit def cassandraIdentityEncodeMapper[Cas](implicit cas: CassandraType[Cas]): CassandraMapper[Cas, Cas, MapperSide.Encode] = + implicit def cassandraIdentityEncodeMapper[Cas](implicit + cas: CassandraType[Cas] + ): CassandraMapper[Cas, Cas, MapperSide.Encode] = CassandraMapper((i, _) => i) - implicit def cassandraIdentityDecodeMapper[Cas](implicit cas: CassandraType[Cas]): CassandraMapper[Cas, Cas, MapperSide.Decode] = + implicit def cassandraIdentityDecodeMapper[Cas](implicit + cas: CassandraType[Cas] + ): CassandraMapper[Cas, Cas, MapperSide.Decode] = CassandraMapper((i, _) => i) - implicit def cassandraMapperEncode[T, Cas]( - implicit - m: MappedEncoding[T, Cas], + implicit def cassandraMapperEncode[T, Cas](implicit + m: MappedEncoding[T, Cas], cas: CassandraType[Cas] ): CassandraMapper[T, Cas, MapperSide.Encode] = CassandraMapper((i, _) => m.f(i)) - implicit def cassandraMapperDecode[T, Cas]( - implicit - m: MappedEncoding[Cas, T], + implicit def cassandraMapperDecode[T, Cas](implicit + m: MappedEncoding[Cas, T], cas: CassandraType[Cas] ): CassandraMapper[Cas, T, MapperSide.Decode] = CassandraMapper((i, _) => m.f(i)) } trait CassandraMapperConversionsLowPriorityImplicits { - implicit def cassandraMapperEncodeRec[I, O, Cas]( - implicit + implicit def cassandraMapperEncodeRec[I, O, Cas](implicit me: MappedEncoding[I, O], cm: CassandraMapper[O, Cas, MapperSide.Encode] 
): CassandraMapper[I, Cas, MapperSide.Encode] = CassandraMapper((i, lookup) => cm.f(me.f(i), lookup)) - implicit def cassandraMapperDecodeRec[I, O, Cas]( - implicit - m: MappedEncoding[I, O], + implicit def cassandraMapperDecodeRec[I, O, Cas](implicit + m: MappedEncoding[I, O], cm: CassandraMapper[Cas, I, MapperSide.Decode] ): CassandraMapper[Cas, O, MapperSide.Decode] = CassandraMapper((i, lookup) => m.f(cm.f(i, lookup))) } diff --git a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/CassandraType.scala b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/CassandraType.scala index 1b681ca42..a76bf666b 100644 --- a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/CassandraType.scala +++ b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/CassandraType.scala @@ -6,4 +6,4 @@ package io.getquill.context.cassandra.encoding trait CassandraType[T] object CassandraType { def of[T]: CassandraType[T] = new CassandraType[T] {} -} \ No newline at end of file +} diff --git a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/CassandraTypes.scala b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/CassandraTypes.scala index 343f02ff6..306c1e8d4 100644 --- a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/CassandraTypes.scala +++ b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/CassandraTypes.scala @@ -1,35 +1,45 @@ package io.getquill.context.cassandra.encoding -import java.lang.{ Boolean => JBoolean, Double => JDouble, Float => JFloat, Integer => JInt, Long => JLong, Short => JShort, Byte => JByte } -import java.math.{ BigDecimal => JBigDecimal } +import java.lang.{ + Boolean => JBoolean, + Double => JDouble, + Float => JFloat, + Integer => JInt, + Long => JLong, + Short => JShort, + Byte => JByte +} +import java.math.{BigDecimal => JBigDecimal} import java.nio.ByteBuffer -import java.util.{ Date, UUID } -import java.time.{ Instant, LocalDate } +import java.util.{Date, UUID} +import java.time.{Instant, LocalDate} /** - * `CassandraTypes` contains implicit markers for already supported types by Cassandra. - * Any of this type can be used in raw encoders/decoders as well as in collections encoding. - * For custom types please use `MappedEncoding` as in `MappedTypes` trait for example. + * `CassandraTypes` contains implicit markers for types already supported by + * Cassandra. Any of these types can be used in raw encoders/decoders as well as + * in collections encoding. For custom types please use `MappedEncoding`, as in + * the `MappedTypes` trait for example.
*/ trait CassandraTypes extends CassandraMappedTypes { - implicit val byteCassandraType: CassandraType[JByte] = CassandraType.of[JByte] - implicit val shortCassandraType: CassandraType[JShort] = CassandraType.of[JShort] - implicit val integerCassandraType: CassandraType[JInt] = CassandraType.of[JInt] - implicit val longCassandraType: CassandraType[JLong] = CassandraType.of[JLong] - implicit val floatCassandraType: CassandraType[JFloat] = CassandraType.of[JFloat] - implicit val doubleCassandraType: CassandraType[JDouble] = CassandraType.of[JDouble] - implicit val booleanCassandraType: CassandraType[JBoolean] = CassandraType.of[JBoolean] - implicit val decimalCassandraType: CassandraType[JBigDecimal] = CassandraType.of[JBigDecimal] - implicit val stringCassandraType: CassandraType[String] = CassandraType.of[String] + implicit val byteCassandraType: CassandraType[JByte] = CassandraType.of[JByte] + implicit val shortCassandraType: CassandraType[JShort] = CassandraType.of[JShort] + implicit val integerCassandraType: CassandraType[JInt] = CassandraType.of[JInt] + implicit val longCassandraType: CassandraType[JLong] = CassandraType.of[JLong] + implicit val floatCassandraType: CassandraType[JFloat] = CassandraType.of[JFloat] + implicit val doubleCassandraType: CassandraType[JDouble] = CassandraType.of[JDouble] + implicit val booleanCassandraType: CassandraType[JBoolean] = CassandraType.of[JBoolean] + implicit val decimalCassandraType: CassandraType[JBigDecimal] = CassandraType.of[JBigDecimal] + implicit val stringCassandraType: CassandraType[String] = CassandraType.of[String] implicit val byteBufferCassandraType: CassandraType[ByteBuffer] = CassandraType.of[ByteBuffer] - implicit val uuidCassandraType: CassandraType[UUID] = CassandraType.of[UUID] - implicit val dateCassandraType: CassandraType[Instant] = CassandraType.of[Instant] - implicit val localDateCassandraType: CassandraType[LocalDate] = CassandraType.of[LocalDate] + implicit val uuidCassandraType: CassandraType[UUID] = CassandraType.of[UUID] + implicit val dateCassandraType: CassandraType[Instant] = CassandraType.of[Instant] + implicit val localDateCassandraType: CassandraType[LocalDate] = CassandraType.of[LocalDate] } /** - * `MappedTypes` contains implicit `CassandraMapper` for Scala primitive/common types - * which are not in relation with CassandraTypes but can be represented as ones. + * `MappedTypes` contains implicit `CassandraMapper` instances for Scala + * primitive/common types which have no direct CassandraType of their own but + * can be represented as one.
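+ *
+ * For instance (a sketch of assumed usage, mirroring the Boolean pair in the
+ * hunk below), a Scala `Int` is bridged to `java.lang.Integer` with the
+ * `CassandraMapper.encode`/`CassandraMapper.decode` helpers and the
+ * `int2Integer`/`Integer2int` conversions from `scala.Predef`:
+ * {{{
+ * implicit val encodeInt: CassandraMapper[Int, JInt, MapperSide.Encode] =
+ *   CassandraMapper.encode(int2Integer)
+ * implicit val decodeInt: CassandraMapper[JInt, Int, MapperSide.Decode] =
+ *   CassandraMapper.decode(Integer2int)
+ * }}}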
*/ trait CassandraMappedTypes { @@ -51,16 +61,22 @@ trait CassandraMappedTypes { implicit val encodeDouble: CassandraMapper[Double, JDouble, MapperSide.Encode] = CassandraMapper.encode(double2Double) implicit val decodeDouble: CassandraMapper[JDouble, Double, MapperSide.Decode] = CassandraMapper.decode(Double2double) - implicit val encodeBoolean: CassandraMapper[Boolean, JBoolean, MapperSide.Encode] = CassandraMapper.encode(boolean2Boolean) - implicit val decodeBoolean: CassandraMapper[JBoolean, Boolean, MapperSide.Decode] = CassandraMapper.decode(Boolean2boolean) + implicit val encodeBoolean: CassandraMapper[Boolean, JBoolean, MapperSide.Encode] = + CassandraMapper.encode(boolean2Boolean) + implicit val decodeBoolean: CassandraMapper[JBoolean, Boolean, MapperSide.Decode] = + CassandraMapper.decode(Boolean2boolean) - implicit val encodeBigDecimal: CassandraMapper[BigDecimal, JBigDecimal, MapperSide.Encode] = CassandraMapper.encode(_.bigDecimal) - implicit val decodeBigDecimal: CassandraMapper[JBigDecimal, BigDecimal, MapperSide.Decode] = CassandraMapper.decode(BigDecimal.apply) + implicit val encodeBigDecimal: CassandraMapper[BigDecimal, JBigDecimal, MapperSide.Encode] = + CassandraMapper.encode(_.bigDecimal) + implicit val decodeBigDecimal: CassandraMapper[JBigDecimal, BigDecimal, MapperSide.Decode] = + CassandraMapper.decode(BigDecimal.apply) - implicit val encodeByteArray: CassandraMapper[Array[Byte], ByteBuffer, MapperSide.Encode] = CassandraMapper.encode(ByteBuffer.wrap) - implicit val decodeByteArray: CassandraMapper[ByteBuffer, Array[Byte], MapperSide.Decode] = CassandraMapper.decode(bb => { - val b = new Array[Byte](bb.remaining()) - bb.get(b) - b - }) + implicit val encodeByteArray: CassandraMapper[Array[Byte], ByteBuffer, MapperSide.Encode] = + CassandraMapper.encode(ByteBuffer.wrap) + implicit val decodeByteArray: CassandraMapper[ByteBuffer, Array[Byte], MapperSide.Decode] = CassandraMapper.decode { + bb => + val b = new Array[Byte](bb.remaining()) + bb.get(b) + b + } } diff --git a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/CollectionDecoders.scala b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/CollectionDecoders.scala index 22b61932e..75bf68734 100644 --- a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/CollectionDecoders.scala +++ b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/CollectionDecoders.scala @@ -12,25 +12,35 @@ trait CollectionDecoders extends EncodingDsl with CassandraRowContext { this: Decoders => // TODO Remove variable b and put directly - implicit def listDecoder[T, Cas](implicit mapper: CassandraMapper[Cas, T, MapperSide.Decode], ct: ClassTag[Cas]): Decoder[List[T]] = { - val b: BaseDecoder[List[T]] = (index, row, session) => row.getList[Cas](index, asClassOf[Cas]).asScala.map(row => mapper.f(row, session)).toList + implicit def listDecoder[T, Cas](implicit + mapper: CassandraMapper[Cas, T, MapperSide.Decode], + ct: ClassTag[Cas] + ): Decoder[List[T]] = { + val b: BaseDecoder[List[T]] = (index, row, session) => + row.getList[Cas](index, asClassOf[Cas]).asScala.map(row => mapper.f(row, session)).toList decoder(b) } - implicit def setDecoder[T, Cas](implicit mapper: CassandraMapper[Cas, T, MapperSide.Decode], ct: ClassTag[Cas]): Decoder[Set[T]] = { - val b: BaseDecoder[Set[T]] = (index, row, session) => row.getSet[Cas](index, asClassOf[Cas]).asScala.map(row => mapper.f(row, session)).toSet + implicit def setDecoder[T, Cas](implicit + mapper: CassandraMapper[Cas, 
T, MapperSide.Decode], + ct: ClassTag[Cas] + ): Decoder[Set[T]] = { + val b: BaseDecoder[Set[T]] = (index, row, session) => + row.getSet[Cas](index, asClassOf[Cas]).asScala.map(row => mapper.f(row, session)).toSet decoder(b) } - implicit def mapDecoder[K, V, KCas, VCas]( - implicit + implicit def mapDecoder[K, V, KCas, VCas](implicit keyMapper: CassandraMapper[KCas, K, MapperSide.Decode], valMapper: CassandraMapper[VCas, V, MapperSide.Decode], a: ClassTag[KCas], b: ClassTag[VCas] ): Decoder[Map[K, V]] = decoder((index, row, session) => - row.getMap[KCas, VCas](index, asClassOf[KCas], asClassOf[VCas]) - .asScala.map(kv => keyMapper.f(kv._1, session) -> valMapper.f(kv._2, session)).toMap + row + .getMap[KCas, VCas](index, asClassOf[KCas], asClassOf[VCas]) + .asScala + .map(kv => keyMapper.f(kv._1, session) -> valMapper.f(kv._2, session)) + .toMap ) } diff --git a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/CollectionEncoders.scala b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/CollectionEncoders.scala index f8faa0c28..ed2b94f1d 100644 --- a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/CollectionEncoders.scala +++ b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/CollectionEncoders.scala @@ -1,7 +1,7 @@ package io.getquill.context.cassandra.encoding import com.datastax.oss.driver.api.core.`type`.reflect.GenericType -import com.datastax.oss.driver.shaded.guava.common.reflect.{ TypeParameter, TypeToken } +import com.datastax.oss.driver.shaded.guava.common.reflect.{TypeParameter, TypeToken} import io.getquill.context.cassandra.CassandraRowContext import io.getquill.context.cassandra.util.ClassTagConversions.asClassOf @@ -14,19 +14,36 @@ trait CollectionEncoders extends EncodingDsl with CassandraRowContext { import scala.reflect._ - implicit def listEncoder[T, Cas](implicit mapper: CassandraMapper[T, Cas, MapperSide.Encode], ct: ClassTag[Cas]): Encoder[List[T]] = - encoder((index, list, row, session) => row.setList[Cas](index.toInt, list.map(row => mapper.f(row, session)).asJava, asClassOf[Cas])) + implicit def listEncoder[T, Cas](implicit + mapper: CassandraMapper[T, Cas, MapperSide.Encode], + ct: ClassTag[Cas] + ): Encoder[List[T]] = + encoder((index, list, row, session) => + row.setList[Cas](index.toInt, list.map(row => mapper.f(row, session)).asJava, asClassOf[Cas]) + ) - implicit def setEncoder[T, Cas](implicit mapper: CassandraMapper[T, Cas, MapperSide.Encode], ct: ClassTag[Cas]): Encoder[Set[T]] = - encoder((index, list, row, session) => row.setSet[Cas](index.toInt, list.map(row => mapper.f(row, session)).asJava, asClassOf[Cas])) + implicit def setEncoder[T, Cas](implicit + mapper: CassandraMapper[T, Cas, MapperSide.Encode], + ct: ClassTag[Cas] + ): Encoder[Set[T]] = + encoder((index, list, row, session) => + row.setSet[Cas](index.toInt, list.map(row => mapper.f(row, session)).asJava, asClassOf[Cas]) + ) - implicit def mapEncoder[K, V, KCas, VCas]( - implicit + implicit def mapEncoder[K, V, KCas, VCas](implicit keyMapper: CassandraMapper[K, KCas, MapperSide.Encode], valMapper: CassandraMapper[V, VCas, MapperSide.Encode], a: ClassTag[KCas], b: ClassTag[VCas] ): Encoder[Map[K, V]] = - encoder((index, map, row, session) => row.setMap(index, map - .map(kv => keyMapper.f(kv._1, session) -> valMapper.f(kv._2, session)).asJava, asClassOf[KCas], asClassOf[VCas])) + encoder((index, map, row, session) => + row.setMap( + index, + map + .map(kv => keyMapper.f(kv._1, session) -> valMapper.f(kv._2, 
session)) + .asJava, + asClassOf[KCas], + asClassOf[VCas] + ) + ) } diff --git a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/Decoders.scala b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/Decoders.scala index 78da26774..47ca1bf61 100644 --- a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/Decoders.scala +++ b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/Decoders.scala @@ -1,15 +1,21 @@ package io.getquill.context.cassandra.encoding -import com.datastax.oss.driver.internal.core.`type`.{ DefaultListType, PrimitiveType } +import com.datastax.oss.driver.internal.core.`type`.{DefaultListType, PrimitiveType} import io.getquill.context.cassandra.CassandraRowContext import io.getquill.util.Messages.fail import io.getquill.MappedEncoding -import java.time.{ Instant, LocalDate, LocalTime } -import java.util.{ Date, UUID } +import java.time.{Instant, LocalDate, LocalTime} +import java.util.{Date, UUID} import io.getquill.generic._ -trait Decoders extends CassandraRowContext with EncodingDsl with CollectionDecoders with CassandraMapperConversions with CassandraTypes with UdtDecoding { +trait Decoders + extends CassandraRowContext + with EncodingDsl + with CollectionDecoders + with CassandraMapperConversions + with CassandraTypes + with UdtDecoding { type Decoder[T] = CassandraDecoder[T] @@ -18,24 +24,24 @@ trait Decoders extends CassandraRowContext with EncodingDsl with CollectionDecod decoder(index, row, session) } - def decoder[T](d: (Int, ResultRow, Session) => T): Decoder[T] = CassandraDecoder( - (index, row, session) => - if (row.isNull(index) && row.getColumnDefinitions.get(index).getType.isInstanceOf[PrimitiveType]) - fail(s"Expected column at index $index to be defined but is was empty or type is unknown ${row.getColumnDefinitions.get(index).getType.getClass}") - else d(index, row, session) - + def decoder[T](d: (Int, ResultRow, Session) => T): Decoder[T] = CassandraDecoder((index, row, session) => + if (row.isNull(index) && row.getColumnDefinitions.get(index).getType.isInstanceOf[PrimitiveType]) + fail( + s"Expected column at index $index to be defined but it was empty or its type is unknown ${row.getColumnDefinitions.get(index).getType.getClass}" + ) + else d(index, row, session) ) def decoder[T](f: ResultRow => Int => T): Decoder[T] = decoder((index, row, session) => f(row)(index)) implicit def optionDecoder[T](implicit d: Decoder[T]): Decoder[Option[T]] = - CassandraDecoder((index, row, session) => { + CassandraDecoder { (index, row, session) => row.isNull(index) match { case true => None case false => Some(d(index, row, session)) } - }) + } implicit def mappedDecoder[I, O](implicit mapped: MappedEncoding[I, O], decoder: Decoder[I]): Decoder[O] = CassandraDecoder(mappedBaseDecoder(mapped, decoder.decoder)) @@ -44,21 +50,21 @@ trait Decoders extends CassandraRowContext with EncodingDsl with CollectionDecod implicit val bigDecimalDecoder: Decoder[BigDecimal] = decoder((index, row, session) => row.getBigDecimal(index)) implicit val booleanDecoder: Decoder[Boolean] = decoder(_.getBoolean) - implicit val byteDecoder: Decoder[Byte] = decoder(_.getByte) - implicit val shortDecoder: Decoder[Short] = decoder(_.getShort) - implicit val intDecoder: Decoder[Int] = decoder(_.getInt) - implicit val longDecoder: Decoder[Long] = decoder(_.getLong) - implicit val floatDecoder: Decoder[Float] = decoder(_.getFloat) - implicit val doubleDecoder: Decoder[Double] = decoder(_.getDouble) + implicit val byteDecoder:
Decoder[Byte] = decoder(_.getByte) + implicit val shortDecoder: Decoder[Short] = decoder(_.getShort) + implicit val intDecoder: Decoder[Int] = decoder(_.getInt) + implicit val longDecoder: Decoder[Long] = decoder(_.getLong) + implicit val floatDecoder: Decoder[Float] = decoder(_.getFloat) + implicit val doubleDecoder: Decoder[Double] = decoder(_.getDouble) implicit val byteArrayDecoder: Decoder[Array[Byte]] = - decoder((index, row, session) => { + decoder { (index, row, session) => val bb = row.getByteBuffer(index) - val b = new Array[Byte](bb.remaining()) + val b = new Array[Byte](bb.remaining()) bb.get(b) b - }) - implicit val uuidDecoder: Decoder[UUID] = decoder(_.getUuid) - implicit val timestampDecoder: Decoder[Instant] = decoder(_.getInstant) + } + implicit val uuidDecoder: Decoder[UUID] = decoder(_.getUuid) + implicit val timestampDecoder: Decoder[Instant] = decoder(_.getInstant) implicit val cassandraLocalTimeDecoder: Decoder[LocalTime] = decoder(_.getLocalTime) implicit val cassandraLocalDateDecoder: Decoder[LocalDate] = decoder(_.getLocalDate) } diff --git a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/Encoders.scala b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/Encoders.scala index 82d406778..212e74e46 100644 --- a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/Encoders.scala +++ b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/Encoders.scala @@ -4,8 +4,8 @@ import io.getquill.context.cassandra.CassandraRowContext import io.getquill.MappedEncoding import java.nio.ByteBuffer -import java.time.{ Instant, LocalDate, LocalTime } -import java.util.{ Date, UUID } +import java.time.{Instant, LocalDate, LocalTime} +import java.util.{Date, UUID} import io.getquill.generic._ import io.getquill.context.UdtValueLookup import io.getquill.context.cassandra.encoding.UdtEncoding @@ -30,12 +30,12 @@ trait CassandraDecodeMapperMaker[Encoder[_], T] { } trait Encoders -extends CassandraRowContext -with EncodingDsl -with CollectionEncoders -with CassandraMapperConversions -with CassandraTypes -with UdtEncoding { + extends CassandraRowContext + with EncodingDsl + with CollectionEncoders + with CassandraMapperConversions + with CassandraTypes + with UdtEncoding { type Encoder[T] = CassandraEncoder[T] @@ -67,16 +67,16 @@ with UdtEncoding { implicit val bigDecimalEncoder: Encoder[BigDecimal] = encoder((index, value, row, _) => row.setBigDecimal(index, value.bigDecimal)) implicit val booleanEncoder: Encoder[Boolean] = encoder(_.setBoolean) - implicit val byteEncoder: Encoder[Byte] = encoder(_.setByte) - implicit val shortEncoder: Encoder[Short] = encoder(_.setShort) - implicit val intEncoder: Encoder[Int] = encoder(_.setInt) - implicit val longEncoder: Encoder[Long] = encoder(_.setLong) - implicit val floatEncoder: Encoder[Float] = encoder(_.setFloat) - implicit val doubleEncoder: Encoder[Double] = encoder(_.setDouble) + implicit val byteEncoder: Encoder[Byte] = encoder(_.setByte) + implicit val shortEncoder: Encoder[Short] = encoder(_.setShort) + implicit val intEncoder: Encoder[Int] = encoder(_.setInt) + implicit val longEncoder: Encoder[Long] = encoder(_.setLong) + implicit val floatEncoder: Encoder[Float] = encoder(_.setFloat) + implicit val doubleEncoder: Encoder[Double] = encoder(_.setDouble) implicit val byteArrayEncoder: Encoder[Array[Byte]] = encoder((index, value, row, _) => row.setByteBuffer(index, ByteBuffer.wrap(value))) - implicit val uuidEncoder: Encoder[UUID] = encoder(_.setUuid) - implicit 
val timestampEncoder: Encoder[Instant] = encoder(_.setInstant) + implicit val uuidEncoder: Encoder[UUID] = encoder(_.setUuid) + implicit val timestampEncoder: Encoder[Instant] = encoder(_.setInstant) implicit val cassandraLocalTimeEncoder: Encoder[LocalTime] = encoder(_.setLocalTime) implicit val cassandraLocalDateEncoder: Encoder[LocalDate] = encoder(_.setLocalDate) } diff --git a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/Encodings.scala b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/Encodings.scala index 81be48801..7a780b4c2 100644 --- a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/Encodings.scala +++ b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/Encodings.scala @@ -1,6 +1,6 @@ package io.getquill.context.cassandra.encoding -import java.time.{ Instant, LocalDate, ZonedDateTime, ZoneId } +import java.time.{Instant, LocalDate, ZonedDateTime, ZoneId} import io.getquill.MappedEncoding import io.getquill.context.cassandra.CassandraContext @@ -11,5 +11,6 @@ trait Encodings extends CassandraMapperConversions with CassandraTypes { protected val zoneId = ZoneId.systemDefault implicit val encodeJava8ZonedDateTime: MappedEncoding[ZonedDateTime, Instant] = MappedEncoding(zdt => zdt.toInstant) - implicit val decodeJava8ZonedDateTime: MappedEncoding[Instant, ZonedDateTime] = MappedEncoding(d => ZonedDateTime.ofInstant(d, zoneId)) + implicit val decodeJava8ZonedDateTime: MappedEncoding[Instant, ZonedDateTime] = + MappedEncoding(d => ZonedDateTime.ofInstant(d, zoneId)) } diff --git a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/MirrorFields.scala b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/MirrorFields.scala index f579b5820..ba57f89b0 100644 --- a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/MirrorFields.scala +++ b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/MirrorFields.scala @@ -7,14 +7,16 @@ import io.getquill.util.Format object MirrorFields { import io.getquill.metaprog.TypeExtensions._ - private def recurseCollect[Fields: Type, Types: Type](fieldsTup: Type[Fields], typesTup: Type[Types])(using Quotes): List[(String, Type[_])] = { + private def recurseCollect[Fields: Type, Types: Type](fieldsTup: Type[Fields], typesTup: Type[Types])(using + Quotes + ): List[(String, Type[_])] = { import quotes.reflect._ (fieldsTup, typesTup) match { case ('[field *: fields], '[tpe *: types]) => val fieldValue = Type.of[field].constValue (fieldValue, Type.of[tpe]) :: recurseCollect[fields, types](Type.of[fields], Type.of[types]) case (_, '[EmptyTuple]) => Nil - case _ => report.throwError("Cannot Derive Product during Type Flattening of Expression:\n" + typesTup) + case _ => report.throwError("Cannot Derive Product during Type Flattening of Expression:\n" + typesTup) } } @@ -23,14 +25,22 @@ object MirrorFields { Expr.summon[Mirror.Of[T]] match { case Some(ev) => ev match { - case '{ $m: Mirror.ProductOf[T] { type MirroredElemLabels = elementLabels; type MirroredElemTypes = elementTypes }} => + case '{ + $m: Mirror.ProductOf[T] { + type MirroredElemLabels = elementLabels; type MirroredElemTypes = elementTypes + } + } => (m, recurseCollect[elementLabels, elementTypes](Type.of[elementLabels], Type.of[elementTypes])) - case '{ $m: Mirror.SumOf[T] { type MirroredElemLabels = elementLabels; type MirroredElemTypes = elementTypes }} => - report.throwError(s"The detected type of ${Format.TypeOf[T]} is a Sum (i.e. 
Enum or Sealed trait hiearchy. Only Product-type (i.e. Case-Class) UDTs are supported.") + case '{ + $m: Mirror.SumOf[T] { type MirroredElemLabels = elementLabels; type MirroredElemTypes = elementTypes } + } => + report.throwError( + s"The detected type of ${Format.TypeOf[T]} is a Sum (i.e. Enum or Sealed trait hierarchy). Only Product-type (i.e. Case-Class) UDTs are supported." + ) } case None => val traces = Thread.currentThread.getStackTrace.take(50).map(" " + _.toString).mkString("\n") report.throwError(s"Could not detect mirror for: ${Format.TypeOf[T]}") } } -} // end MirrorFields \ No newline at end of file +} // end MirrorFields diff --git a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/UdtEncoding.scala b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/UdtEncoding.scala index 074747264..bd064dcca 100644 --- a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/UdtEncoding.scala +++ b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/UdtEncoding.scala @@ -4,22 +4,25 @@ import com.datastax.oss.driver.api.core.data.UdtValue import io.getquill.Udt import io.getquill.context.cassandra.CassandraRowContext import io.getquill.generic.EncodingDsl -import io.getquill.context.{ CassandraSession, ExecutionInfo, Context, UdtValueLookup } +import io.getquill.context.{CassandraSession, ExecutionInfo, Context, UdtValueLookup} import com.datastax.oss.driver.api.core.cql.Row import com.datastax.oss.driver.api.core.cql.BoundStatement import scala.util.NotGiven /** - * UdtDecoding and UdtEncoding classes govern implicit summoning of Udt Encoders/Decoders. - * Unlike Scala2-Quill, in ProtoQuill these need to be separate classes since - * if would have encoders & decoders in one, Encoders self-type would also need to to contain decoders. - * Also note that because I have decided to allow the types Decoder[_], Encoder[_] to be parameters, - * despite the fact that really it is CassandraEncoder and CassandraDecoder that are used everywhere. - * This is to allow future Cassandra context implementations not to have to use that. - * Also, note that just like EncodingDsl.anyValEncoder/Decoder, this implementation is subject - * to the problem of https://github.com/lampepfl/dotty/issues/12179 and therefore needs to have the same - * workaround i.e. having the macro call returning a `CassandraDecoderMaker[Decoder, T] => Decoder[T]` - * as opposed to being able to use `self.decoder/encoder` in the macro directly. + * The UdtDecoding and UdtEncoding classes govern implicit summoning of Udt + * Encoders/Decoders. Unlike Scala2-Quill, in ProtoQuill these need to be + * separate classes since, if one trait held both encoders and decoders, the + * Encoders self-type would also need to contain decoders. Also note that the + * types Decoder[_], Encoder[_] are deliberately kept as parameters, despite + * the fact that really it is CassandraEncoder and CassandraDecoder that are + * used everywhere. This is to allow future Cassandra context implementations + * not to have to use them. Also, note that just like + * EncodingDsl.anyValEncoder/Decoder, this implementation is subject to the + * problem of https://github.com/lampepfl/dotty/issues/12179 and therefore needs + * the same workaround, i.e. having the macro call return a + * `CassandraDecoderMaker[Decoder, T] => Decoder[T]` as opposed to being able to + * use `self.decoder/encoder` in the macro directly.
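+ *
+ * As a rough sketch of that shape (assumed usage, not part of this patch): the
+ * macro yields a function, and the call site immediately applies it to a maker
+ * that the context supplies:
+ * {{{
+ * // the macro returns a function rather than the decoder itself
+ * val mkDecoder: CassandraDecoderMaker[Decoder, T] => Decoder[T] = ...
+ * // makerFromThisContext stands in for the context's own maker instance
+ * val decoder: Decoder[T] = mkDecoder(makerFromThisContext)
+ * }}}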
*/ trait UdtDecoding extends CassandraRowContext { self: Decoders => implicit inline def udtDecoder[T <: Udt](using NotGiven[T =:= Udt]): Decoder[T] = @@ -29,10 +32,13 @@ trait UdtDecoding extends CassandraRowContext { self: Decoders => } ) - implicit inline def udtDecodeMapper[T <: Udt](using NotGiven[T =:= Udt]): CassandraMapper[UdtValue, T, MapperSide.Decode] = + implicit inline def udtDecodeMapper[T <: Udt](using + NotGiven[T =:= Udt] + ): CassandraMapper[UdtValue, T, MapperSide.Decode] = UdtDecodingMacro.udtDecodeMapper( new CassandraDecodeMapperMaker[Decoder, T] { - def apply(f: (UdtValue, UdtValueLookup) => T): CassandraMapper[UdtValue, T, MapperSide.Decode] = CassandraMapper(f) + def apply(f: (UdtValue, UdtValueLookup) => T): CassandraMapper[UdtValue, T, MapperSide.Decode] = + CassandraMapper(f) } ) } @@ -49,7 +55,8 @@ trait UdtEncoding extends CassandraRowContext { self: Encoders => implicit inline def udtEncodeMapper[T <: Udt]: CassandraMapper[T, UdtValue, MapperSide.Encode] = UdtEncodingMacro.udtEncoderMapper( new CassandraEncodeMapperMaker[Encoder, T] { - def apply(f: (T, UdtValueLookup) => UdtValue): CassandraMapper[T, UdtValue, MapperSide.Encode] = CassandraMapper(f) + def apply(f: (T, UdtValueLookup) => UdtValue): CassandraMapper[T, UdtValue, MapperSide.Encode] = + CassandraMapper(f) } ) diff --git a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/UdtEncodingMacro.scala b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/UdtEncodingMacro.scala index 8a0042afd..ed545669f 100644 --- a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/UdtEncodingMacro.scala +++ b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/UdtEncodingMacro.scala @@ -13,7 +13,7 @@ import com.datastax.oss.driver.api.core.cql.BoundStatement import scala.reflect.ClassTag import io.getquill.util.Format import io.getquill.context.UdtValueLookup -import io.getquill.generic.ElaborateStructure.{ TermType, Leaf, Branch } +import io.getquill.generic.ElaborateStructure.{TermType, Leaf, Branch} import com.datastax.oss.driver.api.core.cql.Row import io.getquill.util.ThreadUtil import io.getquill.generic.ConstructType @@ -21,49 +21,58 @@ import io.getquill.generic.ElaborateStructure.UdtBehavior object UdtDecodingMacro { - private[UdtDecodingMacro] case class UdtParams[T <: Udt](udt: Expr[UdtValue], meta: Expr[UdtMeta[T]], sess: Expr[UdtValueLookup]) + private[UdtDecodingMacro] case class UdtParams[T <: Udt]( + udt: Expr[UdtValue], + meta: Expr[UdtMeta[T]], + sess: Expr[UdtValueLookup] + ) inline def udtDecoder[Decoder[_], T <: Udt]: CassandraDecoderMaker[Decoder, T] => Decoder[T] = ${ udtDecoderImpl[Decoder, T] } - def udtDecoderImpl[Decoder[_]: Type, T <: Udt: Type](using Quotes): Expr[CassandraDecoderMaker[Decoder, T] => Decoder[T]] = { + def udtDecoderImpl[Decoder[_]: Type, T <: Udt: Type](using + Quotes + ): Expr[CassandraDecoderMaker[Decoder, T] => Decoder[T]] = { import quotes.reflect._ import scala.deriving._ val madeOrFoundMeta = UdtMeta.build[T] - val decodeUdt = new UdtDecoderMaker[Decoder, T].apply + val decodeUdt = new UdtDecoderMaker[Decoder, T].apply if (TypeRepr.of[T] =:= TypeRepr.of[Udt]) - report.warning(s"Attempting to make UDT decoder for from ${Format.TypeOf[Decoder[T]]}:\n${ThreadUtil.currentThreadTrace}") - - '{ - (cem: CassandraDecoderMaker[Decoder, T]) => { - val meta = ${madeOrFoundMeta} - cem.apply ((i: Int, row: Row, sess: UdtValueLookup) => { - val udtValue = row.getUdtValue(i) - ${ 
decodeUdt(UdtParams('udtValue, 'meta, 'sess)) } - } ) + report.warning( + s"Attempting to make UDT decoder from ${Format.TypeOf[Decoder[T]]}:\n${ThreadUtil.currentThreadTrace}" + ) + + '{ (cem: CassandraDecoderMaker[Decoder, T]) => + val meta = ${ madeOrFoundMeta } + cem.apply { (i: Int, row: Row, sess: UdtValueLookup) => + val udtValue = row.getUdtValue(i) + ${ decodeUdt(UdtParams('udtValue, 'meta, 'sess)) } } } } - inline def udtDecodeMapper[Encoder[_], T <: Udt]: CassandraDecodeMapperMaker[Encoder, T] => CassandraMapper[UdtValue, T, MapperSide.Decode] = ${ udtDecodeMapperImpl[Encoder, T] } + inline def udtDecodeMapper[Encoder[_], T <: Udt] + : CassandraDecodeMapperMaker[Encoder, T] => CassandraMapper[UdtValue, T, MapperSide.Decode] = ${ + udtDecodeMapperImpl[Encoder, T] + } - def udtDecodeMapperImpl[Encoder[_]: Type, T <: Udt: Type](using Quotes): Expr[CassandraDecodeMapperMaker[Encoder, T] => CassandraMapper[UdtValue, T, MapperSide.Decode]] = { + def udtDecodeMapperImpl[Encoder[_]: Type, T <: Udt: Type](using + Quotes + ): Expr[CassandraDecodeMapperMaker[Encoder, T] => CassandraMapper[UdtValue, T, MapperSide.Decode]] = { import quotes.reflect._ val madeOrFoundMeta = UdtMeta.build[T] // TODO quill.trace.types 'summoning' level should enable this - //println(s"**** Mapper summoning decode of: ${Format.TypeOf[T]}") + // println(s"**** Mapper summoning decode of: ${Format.TypeOf[T]}") val decodeUdt = new UdtDecoderMaker[Encoder, T].apply - '{ - (cem: CassandraDecodeMapperMaker[Encoder, T]) => { - val meta = ${madeOrFoundMeta} - cem.apply((udtValue, sess) => { - val udt = sess.udtValueOf(meta.name, meta.keyspace) - ${ decodeUdt(UdtParams('udtValue, 'meta, 'sess)) } - }) + '{ (cem: CassandraDecodeMapperMaker[Encoder, T]) => + val meta = ${ madeOrFoundMeta } + cem.apply { (udtValue, sess) => + val udt = sess.udtValueOf(meta.name, meta.keyspace) + ${ decodeUdt(UdtParams('udtValue, 'meta, 'sess)) } } } } @@ -71,256 +80,311 @@ object UdtDecodingMacro { class UdtDecoderMaker[Encoder[_]: Type, T <: Udt: Type](using Quotes) { import quotes.reflect._ - def apply: UdtParams[T] => Expr[T] = { - (info: UdtParams[T]) => { - // TODO Shared between encoder and decoder. Extract - def lookupField(name: String) = - '{ ${info.meta}.alias(${Expr(name)}).getOrElse(${Expr(name)}) } - - def getField[C: Type](udtValue: Expr[UdtValue], fieldName: String, mapper: Expr[CassandraMapper[_, C, MapperSide.Decode]]) = - mapper.asTerm.tpe.asType match { - case '[CassandraMapper[fromT, C, MapperSide.Decode]] => - val lookedUpField = lookupField(fieldName) - '{ - val classTag = ${summonClassTagOrFail[fromT]}.runtimeClass.asInstanceOf[Class[fromT]] - val fieldValue = $udtValue.get[fromT]($lookedUpField, classTag) - $mapper.asInstanceOf[CassandraMapper[fromT, C, MapperSide.Decode]].f(fieldValue, ${info.sess}) - } - } - - // Elem is the elem type of the encoder. C is the component (i.e. udt-field) type - def getOptional[C: Type](udtValue: Expr[UdtValue], fieldName: String) = - '{ Option(${ getField[C](udtValue, fieldName, summonMapperOrFail[C]) }) } - - def getRegular[C: Type](udtValue: Expr[UdtValue], fieldName: String) = - getField[C](udtValue, fieldName, summonMapperOrFail[C]) + def apply: UdtParams[T] => Expr[T] = { (info: UdtParams[T]) => + // TODO Shared between encoder and decoder.
Extract + def lookupField(name: String) = + '{ ${ info.meta }.alias(${ Expr(name) }).getOrElse(${ Expr(name) }) } + + def getField[C: Type]( + udtValue: Expr[UdtValue], + fieldName: String, + mapper: Expr[CassandraMapper[_, C, MapperSide.Decode]] + ) = + mapper.asTerm.tpe.asType match { + case '[CassandraMapper[fromT, C, MapperSide.Decode]] => + val lookedUpField = lookupField(fieldName) + '{ + val classTag = ${ summonClassTagOrFail[fromT] }.runtimeClass.asInstanceOf[Class[fromT]] + val fieldValue = $udtValue.get[fromT]($lookedUpField, classTag) + $mapper.asInstanceOf[CassandraMapper[fromT, C, MapperSide.Decode]].f(fieldValue, ${ info.sess }) + } + } - // TODO Try swapping out all asInstanceOf for asExprOf outside the expression - def getList[C: Type](udtValue: Expr[UdtValue], fieldName: String) = { - val lookedUpField = lookupField(fieldName) - val mapper = summonMapperOrFail[C] - mapper.asTerm.tpe.asType match { - case '[CassandraMapper[fromT, C, MapperSide.Decode]] => - val typedMapper = '{ $mapper.asInstanceOf[CassandraMapper[fromT, C, MapperSide.Decode]] } - val classFromT = '{ ${summonClassTagOrFail[fromT]}.runtimeClass.asInstanceOf[Class[fromT]] } - '{ UdtValueOps($udtValue) + // Elem is the elem type of the encoder. C is the component (i.e. udt-field) type + def getOptional[C: Type](udtValue: Expr[UdtValue], fieldName: String) = + '{ Option(${ getField[C](udtValue, fieldName, summonMapperOrFail[C]) }) } + + def getRegular[C: Type](udtValue: Expr[UdtValue], fieldName: String) = + getField[C](udtValue, fieldName, summonMapperOrFail[C]) + + // TODO Try swapping out all asInstanceOf for asExprOf outside the expression + def getList[C: Type](udtValue: Expr[UdtValue], fieldName: String) = { + val lookedUpField = lookupField(fieldName) + val mapper = summonMapperOrFail[C] + mapper.asTerm.tpe.asType match { + case '[CassandraMapper[fromT, C, MapperSide.Decode]] => + val typedMapper = '{ $mapper.asInstanceOf[CassandraMapper[fromT, C, MapperSide.Decode]] } + val classFromT = '{ ${ summonClassTagOrFail[fromT] }.runtimeClass.asInstanceOf[Class[fromT]] } + '{ + UdtValueOps($udtValue) .getScalaList[fromT]($lookedUpField, $classFromT) - .map(row => $typedMapper.f(row, ${info.sess})) + .map(row => $typedMapper.f(row, ${ info.sess })) .toList - } - } + } } + } - def getSet[C: Type](udtValue: Expr[UdtValue], fieldName: String) = { - val lookedUpField = lookupField(fieldName) - val mapper = summonMapperOrFail[C] - mapper.asTerm.tpe.asType match { - case '[CassandraMapper[fromT, C, MapperSide.Decode]] => - val typedMapper = '{ $mapper.asInstanceOf[CassandraMapper[fromT, C, MapperSide.Decode]] } - val classFromT = '{ ${summonClassTagOrFail[fromT]}.runtimeClass.asInstanceOf[Class[fromT]] } - '{ UdtValueOps($udtValue) + def getSet[C: Type](udtValue: Expr[UdtValue], fieldName: String) = { + val lookedUpField = lookupField(fieldName) + val mapper = summonMapperOrFail[C] + mapper.asTerm.tpe.asType match { + case '[CassandraMapper[fromT, C, MapperSide.Decode]] => + val typedMapper = '{ $mapper.asInstanceOf[CassandraMapper[fromT, C, MapperSide.Decode]] } + val classFromT = '{ ${ summonClassTagOrFail[fromT] }.runtimeClass.asInstanceOf[Class[fromT]] } + '{ + UdtValueOps($udtValue) .getScalaSet[fromT]($lookedUpField, $classFromT) - .map(row => $typedMapper.f(row, ${info.sess})) + .map(row => $typedMapper.f(row, ${ info.sess })) .toSet - } - } + } } + } - def getMap[CK: Type, CV: Type](udtValue: Expr[UdtValue], fieldName: String) = { - val lookedUpField = lookupField(fieldName) - val keyMapper = 
summonMapperOrFail[CK] - val valMapper = summonMapperOrFail[CV] - (keyMapper.asTerm.tpe.asType, valMapper.asTerm.tpe.asType) match { - case ('[CassandraMapper[fromKT, CK, MapperSide.Decode]], '[CassandraMapper[fromVT, CV, MapperSide.Decode]]) => - val typedKeyMapper = '{ $keyMapper.asInstanceOf[CassandraMapper[fromKT, CK, MapperSide.Decode]] } - val typedValMapper = '{ $valMapper.asInstanceOf[CassandraMapper[fromVT, CV, MapperSide.Decode]] } - val classFromKT = '{ ${summonClassTagOrFail[fromKT]}.runtimeClass.asInstanceOf[Class[fromKT]] } - val classFromVT = '{ ${summonClassTagOrFail[fromVT]}.runtimeClass.asInstanceOf[Class[fromVT]] } - '{ UdtValueOps($udtValue) + def getMap[CK: Type, CV: Type](udtValue: Expr[UdtValue], fieldName: String) = { + val lookedUpField = lookupField(fieldName) + val keyMapper = summonMapperOrFail[CK] + val valMapper = summonMapperOrFail[CV] + (keyMapper.asTerm.tpe.asType, valMapper.asTerm.tpe.asType) match { + case ('[CassandraMapper[fromKT, CK, MapperSide.Decode]], '[CassandraMapper[fromVT, CV, MapperSide.Decode]]) => + val typedKeyMapper = '{ $keyMapper.asInstanceOf[CassandraMapper[fromKT, CK, MapperSide.Decode]] } + val typedValMapper = '{ $valMapper.asInstanceOf[CassandraMapper[fromVT, CV, MapperSide.Decode]] } + val classFromKT = '{ ${ summonClassTagOrFail[fromKT] }.runtimeClass.asInstanceOf[Class[fromKT]] } + val classFromVT = '{ ${ summonClassTagOrFail[fromVT] }.runtimeClass.asInstanceOf[Class[fromVT]] } + '{ + UdtValueOps($udtValue) .getScalaMap[fromKT, fromVT]($lookedUpField, $classFromKT, $classFromVT) .map[CK, CV](row => - ($typedKeyMapper.f(row._1, ${info.sess}), $typedValMapper.f(row._2, ${info.sess})) - ).toMap - } - } - } - - def deriveComponents = { - val (mirror, mirrorFields) = MirrorFields.of[T] - val mappedFields = - mirrorFields.map { - case (fieldName, t @ '[List[tpe]]) => - (t, getList[tpe](info.udt, fieldName)) - case (fieldName, t @ '[Set[tpe]]) => - (t, getSet[tpe](info.udt, fieldName)) - case (fieldName, t @ '[Map[tpeK, tpeV]]) => - (t, getMap[tpeK, tpeV](info.udt, fieldName)) - case (fieldName, t @ '[Option[tpe]]) => - (t, getOptional[tpe](info.udt, fieldName)) - case (fieldName, t @ '[tpe]) => - (t, getRegular[tpe](info.udt, fieldName)) + ($typedKeyMapper.f(row._1, ${ info.sess }), $typedValMapper.f(row._2, ${ info.sess })) + ) + .toMap } - (mirror, mappedFields) } + } - val (mirror, mappedFields) = deriveComponents - val out = ConstructType(mirror, mappedFields) - out + def deriveComponents = { + val (mirror, mirrorFields) = MirrorFields.of[T] + val mappedFields = + mirrorFields.map { + case (fieldName, t @ '[List[tpe]]) => + (t, getList[tpe](info.udt, fieldName)) + case (fieldName, t @ '[Set[tpe]]) => + (t, getSet[tpe](info.udt, fieldName)) + case (fieldName, t @ '[Map[tpeK, tpeV]]) => + (t, getMap[tpeK, tpeV](info.udt, fieldName)) + case (fieldName, t @ '[Option[tpe]]) => + (t, getOptional[tpe](info.udt, fieldName)) + case (fieldName, t @ '[tpe]) => + (t, getRegular[tpe](info.udt, fieldName)) + } + (mirror, mappedFields) } + + val (mirror, mappedFields) = deriveComponents + val out = ConstructType(mirror, mappedFields) + out } // end apply def summonClassTagOrFail[CT: Type] = - Expr.summon[ClassTag[CT]] match { - case Some(ct) => ct - case None => - report.throwError(s"Error creating Encoder[${Format.TypeOf[T]}]. Cannot summon ClassTag for ${Format.TypeOf[CT]}") - } + Expr.summon[ClassTag[CT]] match { + case Some(ct) => ct + case None => + report.throwError( + s"Error creating Encoder[${Format.TypeOf[T]}]. 
Cannot summon ClassTag for ${Format.TypeOf[CT]}" + ) + } def summonMapperOrFail[MT: Type]: Expr[CassandraMapper[_, MT, MapperSide.Decode]] = { import quotes.reflect._ Expr.summon[CassandraMapper[_, MT, MapperSide.Decode]] match { case Some(cm) => cm case None => - report.throwError(s"Error creating Encoder[${Format.TypeOf[T]}]. Cannot summon a CassandraMapper[${Format.TypeOf[MT]}, _, ${Format.TypeOf[MapperSide.Decode]}]") + report.throwError(s"Error creating Encoder[${Format.TypeOf[T]}]. Cannot summon a CassandraMapper[${Format + .TypeOf[MT]}, _, ${Format.TypeOf[MapperSide.Decode]}]") } } } // end UdtDecoderMaker } // end UdtDecodingMacro - object UdtEncodingMacro { - private[UdtEncodingMacro] case class UdtParams[T <: Udt: Type](elem: Expr[T], udt: Expr[UdtValue], meta: Expr[UdtMeta[T]], sess: Expr[UdtValueLookup]) + private[UdtEncodingMacro] case class UdtParams[T <: Udt: Type]( + elem: Expr[T], + udt: Expr[UdtValue], + meta: Expr[UdtMeta[T]], + sess: Expr[UdtValueLookup] + ) class UdtEncoderMaker[Encoder[_]: Type, T <: Udt: Type](using Quotes) { import quotes.reflect._ def apply: (UdtParams[T] => Expr[UdtValue], TermType) = { - val (deconstructedEntityComponents, elaborationType) = ElaborateStructure.decomposedProductValueDetails[T](ElaborationSide.Encoding, UdtBehavior.Derive) + val (deconstructedEntityComponents, elaborationType) = + ElaborateStructure.decomposedProductValueDetails[T](ElaborationSide.Encoding, UdtBehavior.Derive) // val ents = deconstructedEntityComponents.map { case (t, o, g, tpe) => s"(${t} --> ${Format.Expr(g)})"} - //println(s"Components of: ${Format.TypeOf[T]}: ${ents}" ) - - val udtMaker = { - (info: UdtParams[T]) => { - def lookupField(name: String) = - '{ ${info.meta}.alias(${Expr(name)}).getOrElse(${Expr(name)}) } // todo insert summoned naming strategy here - - def setField[C: Type](fieldValue: Expr[C], fieldName: String, udt: Expr[UdtValue], mapper: Expr[CassandraMapper[C, _, MapperSide.Encode]]) = - mapper.asTerm.tpe.asType match { - case '[CassandraMapper[C, toT, MapperSide.Encode]] => - val lookedUpField = lookupField(fieldName) - '{ $udt.set[toT]( - $lookedUpField, - $mapper.asInstanceOf[CassandraMapper[C, toT, MapperSide.Encode]].f($fieldValue, ${info.sess}), - ${summonClassTagOrFail[toT]}.runtimeClass.asInstanceOf[Class[toT]]) - } - } - - // Elem is the elem type of the encoder. C is the component (i.e. 
udt-field) type - def setOptional[Elem: Type, C: Type](fieldValue: Expr[Elem], fieldName: String, udt: Expr[UdtValue], getter: Expr[Elem] => Expr[?]) = - '{ - ${getter.apply(fieldValue)}.asInstanceOf[Option[C]].map(v => - ${ setField[C]('v, fieldName, udt, summonMapperOrFail[C]) } - ).getOrElse($udt.setToNull(${lookupField(fieldName)})) - } + // println(s"Components of: ${Format.TypeOf[T]}: ${ents}" ) - def setRegular[Elem: Type, C: Type](fieldValue: Expr[Elem], fieldName: String, udt: Expr[UdtValue], getter: Expr[Elem] => Expr[?]) = { - val v = '{ ${getter.apply(fieldValue)}.asInstanceOf[C] } - setField(v, fieldName, udt, summonMapperOrFail[C]) + val udtMaker = { (info: UdtParams[T]) => + def lookupField(name: String) = + '{ + ${ info.meta }.alias(${ Expr(name) }).getOrElse(${ Expr(name) }) + } // todo insert summoned naming strategy here + + def setField[C: Type]( + fieldValue: Expr[C], + fieldName: String, + udt: Expr[UdtValue], + mapper: Expr[CassandraMapper[C, _, MapperSide.Encode]] + ) = + mapper.asTerm.tpe.asType match { + case '[CassandraMapper[C, toT, MapperSide.Encode]] => + val lookedUpField = lookupField(fieldName) + '{ + $udt.set[toT]( + $lookedUpField, + $mapper.asInstanceOf[CassandraMapper[C, toT, MapperSide.Encode]].f($fieldValue, ${ info.sess }), + ${ summonClassTagOrFail[toT] }.runtimeClass.asInstanceOf[Class[toT]] + ) + } } - // TODO Try swapping out all asInstanceOf for asExprOf outside the expression - def setList[Elem: Type, C: Type](fieldValue: Expr[Elem], fieldName: String, udt: Expr[UdtValue], getter: Expr[Elem] => Expr[?]) = { - val lookedUpField = lookupField(fieldName) - val mapper = summonMapperOrFail[C] - mapper.asTerm.tpe.asType match { - case '[CassandraMapper[C, toT, MapperSide.Encode]] => - val typedMapper = '{ $mapper.asInstanceOf[CassandraMapper[C, toT, MapperSide.Encode]] } - val list = '{ ${getter.apply(fieldValue)}.asInstanceOf[List[C]].map(row => $typedMapper.f(row, ${info.sess})) } - val classToT = '{ ${summonClassTagOrFail[toT]}.runtimeClass.asInstanceOf[Class[toT]] } - '{ UdtValueOps($udt).setScalaList[toT]($lookedUpField, $list, $classToT) } - } + // Elem is the elem type of the encoder. C is the component (i.e. udt-field) type + def setOptional[Elem: Type, C: Type]( + fieldValue: Expr[Elem], + fieldName: String, + udt: Expr[UdtValue], + getter: Expr[Elem] => Expr[?] + ) = + '{ + ${ getter.apply(fieldValue) } + .asInstanceOf[Option[C]] + .map(v => ${ setField[C]('v, fieldName, udt, summonMapperOrFail[C]) }) + .getOrElse($udt.setToNull(${ lookupField(fieldName) })) } - def setSet[Elem: Type, C: Type](fieldValue: Expr[Elem], fieldName: String, udt: Expr[UdtValue], getter: Expr[Elem] => Expr[?]) = { - val lookedUpField = lookupField(fieldName) - val mapper = summonMapperOrFail[C] - mapper.asTerm.tpe.asType match { - case '[CassandraMapper[C, toT, MapperSide.Encode]] => - val typedMapper = '{ $mapper.asInstanceOf[CassandraMapper[C, toT, MapperSide.Encode]] } - val set = '{ ${getter.apply(fieldValue)}.asInstanceOf[Set[C]].map(row => $typedMapper.f(row, ${info.sess})) } - val classToT = '{ ${summonClassTagOrFail[toT]}.runtimeClass.asInstanceOf[Class[toT]] } - '{ UdtValueOps($udt).setScalaSet[toT]($lookedUpField, $set, $classToT) } - } + def setRegular[Elem: Type, C: Type]( + fieldValue: Expr[Elem], + fieldName: String, + udt: Expr[UdtValue], + getter: Expr[Elem] => Expr[?] 
+ ) = { + val v = '{ ${ getter.apply(fieldValue) }.asInstanceOf[C] } + setField(v, fieldName, udt, summonMapperOrFail[C]) + } + + // TODO Try swapping out all asInstanceOf for asExprOf outside the expression + def setList[Elem: Type, C: Type]( + fieldValue: Expr[Elem], + fieldName: String, + udt: Expr[UdtValue], + getter: Expr[Elem] => Expr[?] + ) = { + val lookedUpField = lookupField(fieldName) + val mapper = summonMapperOrFail[C] + mapper.asTerm.tpe.asType match { + case '[CassandraMapper[C, toT, MapperSide.Encode]] => + val typedMapper = '{ $mapper.asInstanceOf[CassandraMapper[C, toT, MapperSide.Encode]] } + val list = '{ + ${ getter.apply(fieldValue) }.asInstanceOf[List[C]].map(row => $typedMapper.f(row, ${ info.sess })) + } + val classToT = '{ ${ summonClassTagOrFail[toT] }.runtimeClass.asInstanceOf[Class[toT]] } + '{ UdtValueOps($udt).setScalaList[toT]($lookedUpField, $list, $classToT) } } + } - def setMap[Elem: Type, CK: Type, CV: Type](fieldValue: Expr[Elem], fieldName: String, udt: Expr[UdtValue], getter: Expr[Elem] => Expr[?]) = { - val lookedUpField = lookupField(fieldName) - val keyMapper = summonMapperOrFail[CK] - val valMapper = summonMapperOrFail[CV] - (keyMapper.asTerm.tpe.asType, valMapper.asTerm.tpe.asType) match { - case ('[CassandraMapper[CK, toKT, MapperSide.Encode]], '[CassandraMapper[CV, toVT, MapperSide.Encode]]) => - val typedKeyMapper = '{ $keyMapper.asInstanceOf[CassandraMapper[CK, toKT, MapperSide.Encode]] } - val typedValMapper = '{ $valMapper.asInstanceOf[CassandraMapper[CV, toVT, MapperSide.Encode]] } - val map = - '{ ${getter.apply(fieldValue)}.asInstanceOf[Map[CK, CV]] - .map[toKT, toVT](kv => ($typedKeyMapper.f(kv._1, ${info.sess}), $typedValMapper.f(kv._2, ${info.sess}))) - } - val classToKT = '{ ${summonClassTagOrFail[toKT]}.runtimeClass.asInstanceOf[Class[toKT]] } - val classToVT = '{ ${summonClassTagOrFail[toVT]}.runtimeClass.asInstanceOf[Class[toVT]] } - '{ UdtValueOps($udt).setScalaMap[toKT, toVT]($lookedUpField, $map, $classToKT, $classToVT) } - } + def setSet[Elem: Type, C: Type]( + fieldValue: Expr[Elem], + fieldName: String, + udt: Expr[UdtValue], + getter: Expr[Elem] => Expr[?] + ) = { + val lookedUpField = lookupField(fieldName) + val mapper = summonMapperOrFail[C] + mapper.asTerm.tpe.asType match { + case '[CassandraMapper[C, toT, MapperSide.Encode]] => + val typedMapper = '{ $mapper.asInstanceOf[CassandraMapper[C, toT, MapperSide.Encode]] } + val set = '{ + ${ getter.apply(fieldValue) }.asInstanceOf[Set[C]].map(row => $typedMapper.f(row, ${ info.sess })) + } + val classToT = '{ ${ summonClassTagOrFail[toT] }.runtimeClass.asInstanceOf[Class[toT]] } + '{ UdtValueOps($udt).setScalaSet[toT]($lookedUpField, $set, $classToT) } } + } - val components = - deconstructedEntityComponents.map { - case (fieldName, isOptional, getter, fieldType) => - fieldType match { - case '[List[tpe]] => - setList[T, tpe](info.elem, fieldName, info.udt, getter) - case '[Set[tpe]] => - setSet[T, tpe](info.elem, fieldName, info.udt, getter) - case '[Map[tpeK, tpeV]] => - setMap[T, tpeK, tpeV](info.elem, fieldName, info.udt, getter) - case '[tpe] => - if (isOptional) - setOptional[T, tpe](info.elem, fieldName, info.udt, getter) - else - setRegular[T, tpe](info.elem, fieldName, info.udt, getter) + def setMap[Elem: Type, CK: Type, CV: Type]( + fieldValue: Expr[Elem], + fieldName: String, + udt: Expr[UdtValue], + getter: Expr[Elem] => Expr[?] 
+ ) = { + val lookedUpField = lookupField(fieldName) + val keyMapper = summonMapperOrFail[CK] + val valMapper = summonMapperOrFail[CV] + (keyMapper.asTerm.tpe.asType, valMapper.asTerm.tpe.asType) match { + case ('[CassandraMapper[CK, toKT, MapperSide.Encode]], '[CassandraMapper[CV, toVT, MapperSide.Encode]]) => + val typedKeyMapper = '{ $keyMapper.asInstanceOf[CassandraMapper[CK, toKT, MapperSide.Encode]] } + val typedValMapper = '{ $valMapper.asInstanceOf[CassandraMapper[CV, toVT, MapperSide.Encode]] } + val map = + '{ + ${ getter.apply(fieldValue) } + .asInstanceOf[Map[CK, CV]] + .map[toKT, toVT](kv => + ($typedKeyMapper.f(kv._1, ${ info.sess }), $typedValMapper.f(kv._2, ${ info.sess })) + ) } + val classToKT = '{ ${ summonClassTagOrFail[toKT] }.runtimeClass.asInstanceOf[Class[toKT]] } + val classToVT = '{ ${ summonClassTagOrFail[toVT] }.runtimeClass.asInstanceOf[Class[toVT]] } + '{ UdtValueOps($udt).setScalaMap[toKT, toVT]($lookedUpField, $map, $classToKT, $classToVT) } + } + } + + val components = + deconstructedEntityComponents.map { case (fieldName, isOptional, getter, fieldType) => + fieldType match { + case '[List[tpe]] => + setList[T, tpe](info.elem, fieldName, info.udt, getter) + case '[Set[tpe]] => + setSet[T, tpe](info.elem, fieldName, info.udt, getter) + case '[Map[tpeK, tpeV]] => + setMap[T, tpeK, tpeV](info.elem, fieldName, info.udt, getter) + case '[tpe] => + if (isOptional) + setOptional[T, tpe](info.elem, fieldName, info.udt, getter) + else + setRegular[T, tpe](info.elem, fieldName, info.udt, getter) } + } - if (components.isEmpty) - report.throwError(s"The udt-type ${Format.TypeOf[T]} does not have any fields") + if (components.isEmpty) + report.throwError(s"The udt-type ${Format.TypeOf[T]} does not have any fields") - val otherCalls = components.dropRight(1) - val lastCall = components.last - Block(otherCalls.map(_.asTerm), lastCall.asTerm).asExprOf[UdtValue] - } + val otherCalls = components.dropRight(1) + val lastCall = components.last + Block(otherCalls.map(_.asTerm), lastCall.asTerm).asExprOf[UdtValue] } (udtMaker, elaborationType) } def summonClassTagOrFail[CT: Type] = - Expr.summon[ClassTag[CT]] match { - case Some(ct) => ct - case None => - report.throwError(s"Error creating Encoder[${Format.TypeOf[T]}]. Cannot summon ClassTag for ${Format.TypeOf[CT]}") - } + Expr.summon[ClassTag[CT]] match { + case Some(ct) => ct + case None => + report.throwError( + s"Error creating Encoder[${Format.TypeOf[T]}]. Cannot summon ClassTag for ${Format.TypeOf[CT]}" + ) + } def summonMapperOrFail[MT: Type]: Expr[CassandraMapper[MT, _, MapperSide.Encode]] = { import quotes.reflect._ Expr.summon[CassandraMapper[MT, _, MapperSide.Encode]] match { case Some(cm) => cm case None => - report.throwError(s"Error creating Encoder[${Format.TypeOf[T]}]. Cannot summon a CassandraMapper[${Format.TypeOf[MT]}, _, ${Format.TypeOf[MapperSide.Encode]}]") + report.throwError(s"Error creating Encoder[${Format.TypeOf[T]}]. Cannot summon a CassandraMapper[${Format + .TypeOf[MT]}, _, ${Format.TypeOf[MapperSide.Encode]}]") } } - - } // end UdtEncoderMaker // Assuming that: @@ -329,31 +393,38 @@ object UdtEncodingMacro { ${ udtEncoderImpl[Encoder, T] } /** - * Generate an Encoder[T] for some arbitrary Encoder[_] and arbitrary T <: Udt. This - * is done via the Generic Derivation mechanisms of the ProtoQuill Elaborators. + * Generate an Encoder[T] for some arbitrary Encoder[_] and arbitrary T <: + * Udt. This is done via the Generic Derivation mechanisms of the ProtoQuill + * Elaborators. 
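+ *
+ * Roughly, for a hypothetical `case class Name(first: String, last: String)
+ * extends Udt` (an illustrative example, not part of this patch) the derived
+ * encoder behaves like:
+ * {{{
+ * (i, elem, row, sess) =>
+ *   row.setUdtValue(i, {
+ *     val udt = sess.udtValueOf(meta.name, meta.keyspace)
+ *     udt.set("first", elem.first, classOf[String])
+ *     udt.set("last", elem.last, classOf[String])
+ *   })
+ * }}}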
* - * Since we can't directly return an Encoder due to https://github.com/lampepfl/dotty/issues/12179 - * (have a look at the AnyVal encoder for more info), we pass it in as a parameter and the - * the CassandraEncoderMaker pattern to generate it. If we assume that all the encoders cassandra - * contexts ever need to use are CassandraEncoder instances then we don't need this parameter - * and maybe can simplify. Need to look into this some more. + * Since we can't directly return an Encoder due to + * https://github.com/lampepfl/dotty/issues/12179 (have a look at the AnyVal + * encoder for more info), we pass it in as a parameter and use the + * CassandraEncoderMaker pattern to generate it. If we assume that all the + * encoders Cassandra contexts ever need to use are CassandraEncoder instances + * then we don't need this parameter and maybe can simplify. Need to look into + * this some more. */ - def udtEncoderImpl[Encoder[_]: Type, T <: Udt: Type](using Quotes): Expr[CassandraEncoderMaker[Encoder, T] => Encoder[T]] = { + def udtEncoderImpl[Encoder[_]: Type, T <: Udt: Type](using + Quotes + ): Expr[CassandraEncoderMaker[Encoder, T] => Encoder[T]] = { import quotes.reflect._ import scala.deriving._ - val madeOrFoundMeta = UdtMeta.build[T] + val madeOrFoundMeta = UdtMeta.build[T] val (encodeUdt, elaborationType) = new UdtEncoderMaker[Encoder, T].apply def synthesizeEncoder: Expr[CassandraEncoderMaker[Encoder, T] => Encoder[T]] = - '{ - (cem: CassandraEncoderMaker[Encoder, T]) => { - val meta = ${madeOrFoundMeta} - cem.apply ((i: Int, elem: T, row: BoundStatement, sess: UdtValueLookup) => row.setUdtValue(i, { - val udt = sess.udtValueOf(meta.name, meta.keyspace) - ${ encodeUdt(UdtParams('elem, 'udt, 'meta, 'sess)) } - })) - } + '{ (cem: CassandraEncoderMaker[Encoder, T]) => + val meta = ${ madeOrFoundMeta } + cem.apply((i: Int, elem: T, row: BoundStatement, sess: UdtValueLookup) => + row.setUdtValue( + i, { + val udt = sess.udtValueOf(meta.name, meta.keyspace) + ${ encodeUdt(UdtParams('elem, 'udt, 'meta, 'sess)) } + } + ) + ) } elaborationType match { @@ -372,21 +443,24 @@ object UdtEncodingMacro { } } - inline def udtEncoderMapper[Encoder[_], T <: Udt]: CassandraEncodeMapperMaker[Encoder, T] => CassandraMapper[T, UdtValue, MapperSide.Encode] = ${ udtEncoderMapperImpl[Encoder, T] } + inline def udtEncoderMapper[Encoder[_], T <: Udt] + : CassandraEncodeMapperMaker[Encoder, T] => CassandraMapper[T, UdtValue, MapperSide.Encode] = ${ + udtEncoderMapperImpl[Encoder, T] + } - def udtEncoderMapperImpl[Encoder[_]: Type, T <: Udt: Type](using Quotes): Expr[CassandraEncodeMapperMaker[Encoder, T] => CassandraMapper[T, UdtValue, MapperSide.Encode]] = { + def udtEncoderMapperImpl[Encoder[_]: Type, T <: Udt: Type](using + Quotes + ): Expr[CassandraEncodeMapperMaker[Encoder, T] => CassandraMapper[T, UdtValue, MapperSide.Encode]] = { import quotes.reflect._ val madeOrFoundMeta = UdtMeta.build[T] // TODO quill.trace.types 'summoning' level should enable this - //println(s"**** Mapper summoning encoder of: ${Format.TypeOf[T]}") + // println(s"**** Mapper summoning encoder of: ${Format.TypeOf[T]}") val (encodeUdt, _) = new UdtEncoderMaker[Encoder, T].apply - '{ - (cem: CassandraEncodeMapperMaker[Encoder, T]) => { - val meta = ${madeOrFoundMeta} - cem.apply((elem, sess) => { - val udt = sess.udtValueOf(meta.name, meta.keyspace) - ${ encodeUdt(UdtParams('elem, 'udt, 'meta, 'sess)) } - }) + '{ (cem: CassandraEncodeMapperMaker[Encoder, T]) => + val meta = ${ madeOrFoundMeta } + cem.apply { (elem, sess) => + val
udt = sess.udtValueOf(meta.name, meta.keyspace) + ${ encodeUdt(UdtParams('elem, 'udt, 'meta, 'sess)) } } } } diff --git a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/UdtOps.scala b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/UdtOps.scala index e07dd0363..1896f385d 100644 --- a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/UdtOps.scala +++ b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/encoding/UdtOps.scala @@ -5,29 +5,23 @@ import com.datastax.oss.driver.api.core.data.UdtValue import scala.jdk.CollectionConverters._ class UdtValueOps(val udt: UdtValue) extends AnyVal { - def getScalaList[A](name: String, cls: Class[A]) = { + def getScalaList[A](name: String, cls: Class[A]) = udt.getList(name, cls).asScala - } - def getScalaSet[A](name: String, cls: Class[A]) = { + def getScalaSet[A](name: String, cls: Class[A]) = udt.getSet(name, cls).asScala - } - def getScalaMap[K, V](name: String, kcls: Class[K], vcls: Class[V]) = { + def getScalaMap[K, V](name: String, kcls: Class[K], vcls: Class[V]) = udt.getMap(name, kcls, vcls).asScala - } - def setScalaList[A](name: String, v: Seq[A], cls: Class[A]) = { + def setScalaList[A](name: String, v: Seq[A], cls: Class[A]) = udt.setList(name, v.asJava, cls) - } - def setScalaSet[A](name: String, v: Set[A], cls: Class[A]) = { + def setScalaSet[A](name: String, v: Set[A], cls: Class[A]) = udt.setSet(name, v.asJava, cls) - } - def setScalaMap[K, V](name: String, v: Map[K, V], kcls: Class[K], vcls: Class[V]) = { + def setScalaMap[K, V](name: String, v: Map[K, V], kcls: Class[K], vcls: Class[V]) = udt.setMap(name, v.asJava, kcls, vcls) - } } object UdtValueOps { diff --git a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/util/FutureConversions.scala b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/util/FutureConversions.scala index 5769bdf1e..da7ef50a0 100644 --- a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/util/FutureConversions.scala +++ b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/util/FutureConversions.scala @@ -4,23 +4,25 @@ import com.datastax.oss.driver.shaded.guava.common.util.concurrent.ListenableFut import java.util.concurrent.Executor -import scala.concurrent.{ ExecutionContext, Future, Promise } +import scala.concurrent.{ExecutionContext, Future, Promise} import scala.util.Try object FutureConversions { - implicit class ListenableFutureConverter[A](val lf: ListenableFuture[A]) - extends AnyVal { + implicit class ListenableFutureConverter[A](val lf: ListenableFuture[A]) extends AnyVal { def asScala(implicit ec: ExecutionContext): Future[A] = { val promise = Promise[A]() - lf.addListener(new Runnable { - def run(): Unit = { - promise.complete(Try(lf.get())) - () + lf.addListener( + new Runnable { + def run(): Unit = { + promise.complete(Try(lf.get())) + () + } + }, + new Executor { + override def execute(command: Runnable): Unit = ec.execute(command) } - }, new Executor { - override def execute(command: Runnable): Unit = ec.execute(command) - }) + ) promise.future } diff --git a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/util/UdtMetaUtils.scala b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/util/UdtMetaUtils.scala index 000b91aca..3bbf2e643 100644 --- a/quill-cassandra/src/main/scala/io/getquill/context/cassandra/util/UdtMetaUtils.scala +++ b/quill-cassandra/src/main/scala/io/getquill/context/cassandra/util/UdtMetaUtils.scala @@ -3,11 +3,14 @@ package 
io.getquill.context.cassandra.util import io.getquill.util.Messages object UdtMetaUtils { + /** * Extracts udt name and keyspace from given path * - * @param path udt path - * @return (name, keyspace) + * @param path + * udt path + * @return + * (name, keyspace) */ def parse(path: String): (String, Option[String]) = { val arr = path.split('.') diff --git a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/CassandraContextConfigSpec.scala b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/CassandraContextConfigSpec.scala index 8e6c1d5e2..db1625d7a 100644 --- a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/CassandraContextConfigSpec.scala +++ b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/CassandraContextConfigSpec.scala @@ -1,7 +1,7 @@ package io.getquill.context.cassandra import com.typesafe.config.ConfigFactory -import io.getquill.{ CassandraContextConfig, Spec } +import io.getquill.{CassandraContextConfig, Spec} class CassandraContextConfigSpec extends Spec { "load default preparedStatementCacheSize if not found in configs" in { diff --git a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/CassandraContextSpec.scala b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/CassandraContextSpec.scala index c8dccbfa7..66d0ab0f6 100644 --- a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/CassandraContextSpec.scala +++ b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/CassandraContextSpec.scala @@ -4,13 +4,14 @@ import io.getquill._ import io.getquill.context.ExecutionInfo import io.getquill.ReturnAction.ReturnNothing -import scala.concurrent.ExecutionContext.Implicits.{ global => ec } -import scala.util.{ Success, Try } +import scala.concurrent.ExecutionContext.Implicits.{global => ec} +import scala.util.{Success, Try} class CassandraContextSpec extends Spec { // val et = io.getquill.context.ExecutionType // val vvv = et.lifts - val unknown = ExecutionInfo(io.getquill.context.ExecutionType.Static, io.getquill.ast.NullValue, io.getquill.quat.Quat.Value) + val unknown = + ExecutionInfo(io.getquill.context.ExecutionType.Static, io.getquill.ast.NullValue, io.getquill.quat.Quat.Value) "run non-batched action" - { @@ -34,10 +35,10 @@ class CassandraContextSpec extends Spec { "fail on returning" in { import testSyncDB._ - val p: Prepare = (x, session) => (Nil, x) + val p: Prepare = (x, session) => (Nil, x) val e: Extractor[Int] = (_, _) => 1 - intercept[IllegalStateException](executeBatchActionReturning(Nil, e)(unknown, ec)).getMessage + intercept[IllegalStateException](executeBatchActionReturning(Nil, e)(unknown, ec)).getMessage } "probe" in { diff --git a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/CollectionsSpec.scala b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/CollectionsSpec.scala index 974e5cc99..6e27e1c18 100644 --- a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/CollectionsSpec.scala +++ b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/CollectionsSpec.scala @@ -1,6 +1,6 @@ package io.getquill.context.cassandra -import io.getquill.{ MappedEncoding, Spec } +import io.getquill.{MappedEncoding, Spec} import org.scalatest.BeforeAndAfterEach import io.getquill.MappedEncoding import io.getquill.context.cassandra.encoding.CassandraMapper diff --git a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/CqlIdiomSpec.scala b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/CqlIdiomSpec.scala index 5f3275ed8..dc49b0e16 
100644 --- a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/CqlIdiomSpec.scala +++ b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/CqlIdiomSpec.scala @@ -2,7 +2,7 @@ package io.getquill.context.cassandra import io.getquill._ import io.getquill.idiom.StatementInterpolator._ -import io.getquill.ast.{ Action => AstAction, Query => _, _ } +import io.getquill.ast.{Action => AstAction, Query => _, _} import io.getquill.idiom.StringToken import io.getquill.Query import io.getquill.quat.Quat @@ -387,11 +387,19 @@ class CqlIdiomSpec extends Spec { } "assignment" in { val a: Ast = Assignment(Ident("a"), Ident("b"), Ident("c")) - translate(a: Ast, Quat.Unknown, ExecutionType.Unknown, IdiomContext.Empty) mustBe ((a, stmt"b = c", ExecutionType.Unknown)) + translate(a: Ast, Quat.Unknown, ExecutionType.Unknown, IdiomContext.Empty) mustBe (( + a, + stmt"b = c", + ExecutionType.Unknown + )) } "assignmentDual" in { val a: Ast = AssignmentDual(Ident("a1"), Ident("a2"), Ident("b"), Ident("c")) - translate(a: Ast, Quat.Unknown, ExecutionType.Unknown, IdiomContext.Empty) mustBe ((a, stmt"b = c", ExecutionType.Unknown)) + translate(a: Ast, Quat.Unknown, ExecutionType.Unknown, IdiomContext.Empty) mustBe (( + a, + stmt"b = c", + ExecutionType.Unknown + )) } "aggregation" in { val t = implicitly[Tokenizer[AggregationOperator]] diff --git a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/CqlQuerySpec.scala b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/CqlQuerySpec.scala index a432519d5..8366b397f 100644 --- a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/CqlQuerySpec.scala +++ b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/CqlQuerySpec.scala @@ -139,49 +139,65 @@ class CqlQuerySpec extends Spec { inline def q = quote { qr1.flatMap(r1 => qr2.filter(_.i == r1.i)) } - intercept[IllegalStateException](CqlQuery(q.ast.asInstanceOf[ast.Query])).getMessage mustEqual "Cql doesn't support flatMap." + intercept[IllegalStateException]( + CqlQuery(q.ast.asInstanceOf[ast.Query]) + ).getMessage mustEqual "Cql doesn't support flatMap." } "groupBy not supported" in { inline def q = quote { qr1.groupBy(t => t.i) } - intercept[IllegalStateException](CqlQuery(q.ast.asInstanceOf[ast.Query])).getMessage mustEqual "Cql doesn't support groupBy." + intercept[IllegalStateException]( + CqlQuery(q.ast.asInstanceOf[ast.Query]) + ).getMessage mustEqual "Cql doesn't support groupBy." } "union not supported" in { inline def q = quote { qr1.filter(_.i == 0).union(qr1.filter(_.i == 1)) } - intercept[IllegalStateException](CqlQuery(q.ast.asInstanceOf[ast.Query])).getMessage mustEqual "Cql doesn't support union/unionAll." + intercept[IllegalStateException]( + CqlQuery(q.ast.asInstanceOf[ast.Query]) + ).getMessage mustEqual "Cql doesn't support union/unionAll." } "unionAll not supported" in { inline def q = quote { qr1.filter(_.i == 0).unionAll(qr1.filter(_.i == 1)) } - intercept[IllegalStateException](CqlQuery(q.ast.asInstanceOf[ast.Query])).getMessage mustEqual "Cql doesn't support union/unionAll." + intercept[IllegalStateException]( + CqlQuery(q.ast.asInstanceOf[ast.Query]) + ).getMessage mustEqual "Cql doesn't support union/unionAll." } "join not supported" in { inline def q = quote { qr1.join(qr2).on((a, b) => a.i == b.i) } - intercept[IllegalStateException](CqlQuery(q.ast.asInstanceOf[ast.Query])).getMessage mustEqual "Cql doesn't support InnerJoin." 
+ intercept[IllegalStateException]( + CqlQuery(q.ast.asInstanceOf[ast.Query]) + ).getMessage mustEqual "Cql doesn't support InnerJoin." } "leftJoin not supported" in { inline def q = quote { qr1.leftJoin(qr2).on((a, b) => a.i == b.i) } - intercept[IllegalStateException](CqlQuery(q.ast.asInstanceOf[ast.Query])).getMessage mustEqual "Cql doesn't support LeftJoin." + intercept[IllegalStateException]( + CqlQuery(q.ast.asInstanceOf[ast.Query]) + ).getMessage mustEqual "Cql doesn't support LeftJoin." } "rightJoin not supported" in { inline def q = quote { qr1.rightJoin(qr2).on((a, b) => a.i == b.i) } - intercept[IllegalStateException](CqlQuery(q.ast.asInstanceOf[ast.Query])).getMessage mustEqual "Cql doesn't support RightJoin." + intercept[IllegalStateException]( + CqlQuery(q.ast.asInstanceOf[ast.Query]) + ).getMessage mustEqual "Cql doesn't support RightJoin." } "fullJoin not supported" in { inline def q = quote { qr1.fullJoin(qr2).on((a, b) => a.i == b.i) } - intercept[IllegalStateException](CqlQuery(q.ast.asInstanceOf[ast.Query])).getMessage mustEqual "Cql doesn't support FullJoin." + intercept[IllegalStateException]( + CqlQuery(q.ast.asInstanceOf[ast.Query]) + ).getMessage mustEqual "Cql doesn't support FullJoin." } "sortBy after take" in { inline def q = quote { diff --git a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/DecodeNullSpec.scala b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/DecodeNullSpec.scala index 6a6ebd607..1e31697db 100644 --- a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/DecodeNullSpec.scala +++ b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/DecodeNullSpec.scala @@ -24,8 +24,8 @@ class DecodeNullSpec extends Spec { val result = for { - _ <- testAsyncDB.run(writeEntities.delete) - _ <- testAsyncDB.run(writeEntities.insertValue(lift(insertee))) + _ <- testAsyncDB.run(writeEntities.delete) + _ <- testAsyncDB.run(writeEntities.insertValue(lift(insertee))) result <- testAsyncDB.run(query[DecodeNullTestEntity]) } yield { result diff --git a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/EncodingSpec.scala b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/EncodingSpec.scala index 1d2f73a9c..cde8010a9 100644 --- a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/EncodingSpec.scala +++ b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/EncodingSpec.scala @@ -1,10 +1,9 @@ package io.getquill.context.cassandra -import java.time.{ Instant, LocalDate, ZoneId, ZonedDateTime } +import java.time.{Instant, LocalDate, ZoneId, ZonedDateTime} import io.getquill.Query import io.getquill._ - class EncodingSpec extends EncodingSpecHelper { "encodes and decodes types" - { @@ -21,8 +20,8 @@ class EncodingSpec extends EncodingSpecHelper { import scala.concurrent.ExecutionContext.Implicits.global await { for { - _ <- testAsyncDB.run(query[EncodingTestEntity].delete) - _ <- testAsyncDB.run(liftQuery(insertValues).foreach(e => query[EncodingTestEntity].insertValue(e))) + _ <- testAsyncDB.run(query[EncodingTestEntity].delete) + _ <- testAsyncDB.run(liftQuery(insertValues).foreach(e => query[EncodingTestEntity].insertValue(e))) result <- testAsyncDB.run(query[EncodingTestEntity]) } yield { verify(result) @@ -34,9 +33,8 @@ class EncodingSpec extends EncodingSpecHelper { "encodes collections" - { "sync" in { import testSyncDB._ - inline def q = quote { - (list: Query[Int]) => - query[EncodingTestEntity].filter(t => list.contains(t.id)) + inline def q = quote { (list: Query[Int]) => + 
query[EncodingTestEntity].filter(t => list.contains(t.id)) } testSyncDB.run(query[EncodingTestEntity]) testSyncDB.run(liftQuery(insertValues).foreach(e => query[EncodingTestEntity].insertValue(e))) @@ -46,9 +44,8 @@ class EncodingSpec extends EncodingSpecHelper { "async" in { import testAsyncDB._ import scala.concurrent.ExecutionContext.Implicits.global - inline def q = quote { - (list: Query[Int]) => - query[EncodingTestEntity].filter(t => list.contains(t.id)) + inline def q = quote { (list: Query[Int]) => + query[EncodingTestEntity].filter(t => list.contains(t.id)) } await { for { @@ -90,15 +87,15 @@ class EncodingSpec extends EncodingSpecHelper { val ctx = testSyncDB import ctx._ - val epoh = System.currentTimeMillis() - val epohDay = epoh / 86400000L - val instant = Instant.ofEpochMilli(epoh) + val epoh = System.currentTimeMillis() + val epohDay = epoh / 86400000L + val instant = Instant.ofEpochMilli(epoh) val zonedDateTime = ZonedDateTime.ofInstant(instant, ZoneId.systemDefault) inline def jq = quote(querySchema[Java8Types]("EncodingTestEntity")) - val j = Java8Types(LocalDate.ofEpochDay(epohDay), instant, Some(zonedDateTime)) + val j = Java8Types(LocalDate.ofEpochDay(epohDay), instant, Some(zonedDateTime)) inline def cq = quote(querySchema[CasTypes]("EncodingTestEntity")) - val c = CasTypes(LocalDate.ofEpochDay(epohDay), Instant.ofEpochMilli(epoh), Some(zonedDateTime)) + val c = CasTypes(LocalDate.ofEpochDay(epohDay), Instant.ofEpochMilli(epoh), Some(zonedDateTime)) ctx.run(jq.delete) ctx.run(jq.insertValue(lift(j))) diff --git a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/EncodingSpecHelper.scala b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/EncodingSpecHelper.scala index b8e6f7b68..1d8037bd3 100644 --- a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/EncodingSpecHelper.scala +++ b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/EncodingSpecHelper.scala @@ -1,9 +1,9 @@ package io.getquill.context.cassandra -import java.util.{ Date, UUID } +import java.util.{Date, UUID} import io.getquill.Spec -import java.time.{ Instant, LocalDate } +import java.time.{Instant, LocalDate} abstract class EncodingSpecHelper extends Spec { protected def verify(result: List[EncodingTestEntity]): Unit = @@ -43,29 +43,29 @@ abstract class EncodingSpecHelper extends Spec { } case class EncodingTestEntity( - id: Int, - v1: String, - v2: BigDecimal, - v3: Boolean, - v4: Int, - v5: Long, - v6: Float, - v7: Double, - v8: Array[Byte], - v9: LocalDate, + id: Int, + v1: String, + v2: BigDecimal, + v3: Boolean, + v4: Int, + v5: Long, + v6: Float, + v7: Double, + v8: Array[Byte], + v9: LocalDate, v10: UUID, v11: Instant, v12: Byte, v13: Short, - o1: Option[String], - o2: Option[BigDecimal], - o3: Option[Boolean], - o4: Option[Int], - o5: Option[Long], - o6: Option[Float], - o7: Option[Double], - o8: Option[Array[Byte]], - o9: Option[Instant], + o1: Option[String], + o2: Option[BigDecimal], + o3: Option[Boolean], + o4: Option[Int], + o5: Option[Long], + o6: Option[Float], + o7: Option[Double], + o8: Option[Array[Byte]], + o9: Option[Instant], o10: Option[LocalDate] ) @@ -106,8 +106,8 @@ abstract class EncodingSpecHelper extends Spec { v3 = false, v4 = 0, v5 = 0L, - v6 = 0F, - v7 = 0D, + v6 = 0f, + v7 = 0d, v8 = Array(), v9 = LocalDate.ofEpochDay(0), v10 = fixUUID, diff --git a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/ListsEncodingSpec.scala b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/ListsEncodingSpec.scala index 
3a6f17ff5..919773df2 100644 --- a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/ListsEncodingSpec.scala +++ b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/ListsEncodingSpec.scala @@ -12,22 +12,32 @@ class ListsEncodingSpec extends CollectionsSpec { import ctx._ case class ListsEntity( - id: Int, - texts: List[String], - decimals: List[BigDecimal], - bools: List[Boolean], - bytes: List[Byte], - shorts: List[Short], - ints: List[Int], - longs: List[Long], - floats: List[Float], - doubles: List[Double], - dates: List[LocalDate], + id: Int, + texts: List[String], + decimals: List[BigDecimal], + bools: List[Boolean], + bytes: List[Byte], + shorts: List[Short], + ints: List[Int], + longs: List[Long], + floats: List[Float], + doubles: List[Double], + dates: List[LocalDate], timestamps: List[Instant], - uuids: List[UUID] + uuids: List[UUID] ) - val e = ListsEntity(1, List("c"), List(BigDecimal(1.33)), List(true), List(0, 1), List(3, 2), List(1, 2), List(2, 3), - List(1f, 3f), List(5d), List(LocalDate.now()), + val e = ListsEntity( + 1, + List("c"), + List(BigDecimal(1.33)), + List(true), + List(0, 1), + List(3, 2), + List(1, 2), + List(2, 3), + List(1f, 3f), + List(5d), + List(LocalDate.now()), List(Instant.now().truncatedTo(ChronoUnit.MILLIS)), List(UUID.randomUUID()) ) @@ -47,7 +57,7 @@ class ListsEncodingSpec extends CollectionsSpec { "Empty lists and optional fields" in { case class Entity(id: Int, texts: Option[List[String]], bools: Option[List[Boolean]], ints: List[Int]) - val e = Entity(1, Some(List("1", "2")), None, Nil) + val e = Entity(1, Some(List("1", "2")), None, Nil) inline def q = quote(querySchema[Entity]("ListsEntity")) ctx.run(q.insertValue(lift(e))) @@ -57,7 +67,7 @@ class ListsEncodingSpec extends CollectionsSpec { "Mapped encoding for CassandraType" in { case class StrEntity(id: Int, texts: List[StrWrap]) - val e = StrEntity(1, List("1", "2").map(StrWrap.apply)) + val e = StrEntity(1, List("1", "2").map(StrWrap.apply)) inline def q = quote(querySchema[StrEntity]("ListsEntity")) ctx.run(q.insertValue(lift(e))) @@ -66,7 +76,7 @@ class ListsEncodingSpec extends CollectionsSpec { "Mapped encoding for CassandraMapper types" in { case class IntEntity(id: Int, ints: List[IntWrap]) - val e = IntEntity(1, List(1, 2).map(IntWrap.apply)) + val e = IntEntity(1, List(1, 2).map(IntWrap.apply)) inline def q = quote(querySchema[IntEntity]("ListsEntity")) ctx.run(q.insertValue(lift(e))) @@ -75,12 +85,11 @@ class ListsEncodingSpec extends CollectionsSpec { "Blob (Array[Byte]) support" in { case class BlobsEntity(id: Int, blobs: List[Array[Byte]]) - val e = BlobsEntity(1, List(Array(1.toByte, 2.toByte), Array(2.toByte))) + val e = BlobsEntity(1, List(Array(1.toByte, 2.toByte), Array(2.toByte))) inline def q = quote(querySchema[BlobsEntity]("ListsEntity")) ctx.run(q.insertValue(lift(e))) - ctx.run(q.filter(_.id == 1)) - .head.blobs.map(_.toList) mustBe e.blobs.map(_.toList) + ctx.run(q.filter(_.id == 1)).head.blobs.map(_.toList) mustBe e.blobs.map(_.toList) } "List in where clause / contains" in { diff --git a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/MapsEncodingSpec.scala b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/MapsEncodingSpec.scala index 0b266ab12..f92e54438 100644 --- a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/MapsEncodingSpec.scala +++ b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/MapsEncodingSpec.scala @@ -11,15 +11,19 @@ class MapsEncodingSpec extends CollectionsSpec { import 
ctx._ case class MapsEntity( - id: Int, - textDecimal: Map[String, BigDecimal], - intDouble: Map[Int, Double], - longFloat: Map[Long, Float], - boolDate: Map[Boolean, LocalDate], + id: Int, + textDecimal: Map[String, BigDecimal], + intDouble: Map[Int, Double], + longFloat: Map[Long, Float], + boolDate: Map[Boolean, LocalDate], uuidTimestamp: Map[UUID, Instant] ) - val e = MapsEntity(1, Map("1" -> BigDecimal(1)), Map(1 -> 1d, 2 -> 2d, 3 -> 3d), Map(1L -> 3f), - Map(true -> LocalDate.now()), + val e = MapsEntity( + 1, + Map("1" -> BigDecimal(1)), + Map(1 -> 1d, 2 -> 2d, 3 -> 3d), + Map(1L -> 3f), + Map(true -> LocalDate.now()), Map(UUID.randomUUID() -> Instant.now().truncatedTo(ChronoUnit.MILLIS)) ) inline def q = quote(query[MapsEntity]) @@ -31,12 +35,12 @@ class MapsEncodingSpec extends CollectionsSpec { "Empty maps and optional fields" in { case class Entity( - id: Int, + id: Int, textDecimal: Option[Map[String, BigDecimal]], - intDouble: Option[Map[Int, Double]], - longFloat: Map[Long, Float] + intDouble: Option[Map[Int, Double]], + longFloat: Map[Long, Float] ) - val e = Entity(1, Some(Map("1" -> BigDecimal(1))), None, Map()) + val e = Entity(1, Some(Map("1" -> BigDecimal(1))), None, Map()) inline def q = quote(querySchema[Entity]("MapsEntity")) ctx.run(q.insertValue(lift(e))) @@ -45,7 +49,7 @@ class MapsEncodingSpec extends CollectionsSpec { "Mapped encoding for CassandraType" in { case class StrEntity(id: Int, textDecimal: Map[StrWrap, BigDecimal]) - val e = StrEntity(1, Map(StrWrap("1") -> BigDecimal(1))) + val e = StrEntity(1, Map(StrWrap("1") -> BigDecimal(1))) inline def q = quote(querySchema[StrEntity]("MapsEntity")) ctx.run(q.insertValue(lift(e))) @@ -54,7 +58,7 @@ class MapsEncodingSpec extends CollectionsSpec { "Mapped encoding for CassandraMapper types" in { case class IntEntity(id: Int, intDouble: Map[IntWrap, Double]) - val e = IntEntity(1, Map(IntWrap(1) -> 1d)) + val e = IntEntity(1, Map(IntWrap(1) -> 1d)) inline def q = quote(querySchema[IntEntity]("MapsEntity")) ctx.run(q.insertValue(lift(e))) diff --git a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/PeopleCassandraSpec.scala b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/PeopleCassandraSpec.scala index 639c56ee3..741017376 100644 --- a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/PeopleCassandraSpec.scala +++ b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/PeopleCassandraSpec.scala @@ -21,8 +21,8 @@ class PeopleCassandraSpec extends Spec { () } - inline def q = quote { - (ids: Query[Int]) => query[Person].filter(p => ids.contains(p.id)) + inline def q = quote { (ids: Query[Int]) => + query[Person].filter(p => ids.contains(p.id)) } "Contains id" - { diff --git a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/QueryResultTypeCassandraSpec.scala b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/QueryResultTypeCassandraSpec.scala index 9f6525f43..fd658cfb4 100644 --- a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/QueryResultTypeCassandraSpec.scala +++ b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/QueryResultTypeCassandraSpec.scala @@ -19,14 +19,14 @@ trait QueryResultTypeCassandraSpec extends Spec { OrderTestEntity(3, 3) ) - inline def insert = quote((e: OrderTestEntity) => query[OrderTestEntity].insertValue(e)) - inline def deleteAll = quote(query[OrderTestEntity].delete) - inline def selectAll = quote(query[OrderTestEntity]) - inline def map = quote(query[OrderTestEntity].map(_.id)) - inline def 
filter = quote(query[OrderTestEntity].filter(_.id == 1)) + inline def insert = quote((e: OrderTestEntity) => query[OrderTestEntity].insertValue(e)) + inline def deleteAll = quote(query[OrderTestEntity].delete) + inline def selectAll = quote(query[OrderTestEntity]) + inline def map = quote(query[OrderTestEntity].map(_.id)) + inline def filter = quote(query[OrderTestEntity].filter(_.id == 1)) inline def withFilter = quote(query[OrderTestEntity].withFilter(_.id == 1)) - inline def sortBy = quote(query[OrderTestEntity].filter(_.id == 1).sortBy(_.i)(Ord.asc)) - inline def take = quote(query[OrderTestEntity].take(10)) + inline def sortBy = quote(query[OrderTestEntity].filter(_.id == 1).sortBy(_.i)(Ord.asc)) + inline def take = quote(query[OrderTestEntity].take(10)) inline def entitySize = quote(query[OrderTestEntity].size) inline def parametrizedSize = quote { (id: Int) => query[OrderTestEntity].filter(_.id == id).size diff --git a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/QueryResultTypeCassandraSyncSpec.scala b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/QueryResultTypeCassandraSyncSpec.scala index a85c19188..eb7519fb3 100644 --- a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/QueryResultTypeCassandraSyncSpec.scala +++ b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/QueryResultTypeCassandraSyncSpec.scala @@ -3,7 +3,6 @@ package io.getquill.context.cassandra import io.getquill.CassandraSyncContext import io.getquill._ - class QueryResultTypeCassandraSyncSpec extends QueryResultTypeCassandraSpec { val context: CassandraSyncContext[Literal.type] with CassandraTestEntities = testSyncDB diff --git a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/SetsEncodingSpec.scala b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/SetsEncodingSpec.scala index cdca5877e..5297b124f 100644 --- a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/SetsEncodingSpec.scala +++ b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/SetsEncodingSpec.scala @@ -11,21 +11,31 @@ class SetsEncodingSpec extends CollectionsSpec { import ctx._ case class SetsEntity( - id: Int, - texts: Set[String], - decimals: Set[BigDecimal], - bools: Set[Boolean], - ints: Set[Int], - longs: Set[Long], - floats: Set[Float], - doubles: Set[Double], - dates: Set[LocalDate], + id: Int, + texts: Set[String], + decimals: Set[BigDecimal], + bools: Set[Boolean], + ints: Set[Int], + longs: Set[Long], + floats: Set[Float], + doubles: Set[Double], + dates: Set[LocalDate], timestamps: Set[Instant], - uuids: Set[UUID] + uuids: Set[UUID] + ) + val e = SetsEntity( + 1, + Set("c"), + Set(BigDecimal(1.33)), + Set(true), + Set(1, 2), + Set(2, 3), + Set(1f, 3f), + Set(5d), + Set(LocalDate.now()), + Set(Instant.now().truncatedTo(ChronoUnit.MILLIS)), + Set(UUID.randomUUID()) ) - val e = SetsEntity(1, Set("c"), Set(BigDecimal(1.33)), Set(true), Set(1, 2), Set(2, 3), Set(1f, 3f), - Set(5d), Set(LocalDate.now()), - Set(Instant.now().truncatedTo(ChronoUnit.MILLIS)), Set(UUID.randomUUID())) inline def q = quote(query[SetsEntity]) "Set encoders/decoders" in { @@ -35,7 +45,7 @@ class SetsEncodingSpec extends CollectionsSpec { "Empty sets and optional fields" in { case class Entity(id: Int, texts: Option[Set[String]], bools: Option[Set[Boolean]], ints: Set[Int]) - val e = Entity(1, Some(Set("1", "2")), None, Set()) + val e = Entity(1, Some(Set("1", "2")), None, Set()) inline def q = quote(querySchema[Entity]("SetsEntity")) ctx.run(q.insertValue(lift(e))) @@ -44,7 
+54,7 @@ class SetsEncodingSpec extends CollectionsSpec { "Mapped encoding for CassandraType" in { case class StrEntity(id: Int, texts: Set[StrWrap]) - val e = StrEntity(1, Set("1", "2").map(StrWrap.apply)) + val e = StrEntity(1, Set("1", "2").map(StrWrap.apply)) inline def q = quote(querySchema[StrEntity]("SetsEntity")) ctx.run(q.insertValue(lift(e))) @@ -53,7 +63,7 @@ class SetsEncodingSpec extends CollectionsSpec { "Mapped encoding for CassandraMapper types" in { case class IntEntity(id: Int, ints: Set[IntWrap]) - val e = IntEntity(1, Set(1, 2).map(IntWrap.apply)) + val e = IntEntity(1, Set(1, 2).map(IntWrap.apply)) inline def q = quote(querySchema[IntEntity]("SetsEntity")) ctx.run(q.insertValue(lift(e))) @@ -62,12 +72,11 @@ class SetsEncodingSpec extends CollectionsSpec { "Blob (Array[Byte]) support" in { case class BlobsEntity(id: Int, blobs: Set[Array[Byte]]) - val e = BlobsEntity(1, Set(Array(1.toByte, 2.toByte), Array(2.toByte))) + val e = BlobsEntity(1, Set(Array(1.toByte, 2.toByte), Array(2.toByte))) inline def q = quote(querySchema[BlobsEntity]("SetsEntity")) ctx.run(q.insertValue(lift(e))) - ctx.run(q.filter(_.id == 1)) - .head.blobs.map(_.toSet) mustBe e.blobs.map(_.toSet) + ctx.run(q.filter(_.id == 1)).head.blobs.map(_.toSet) mustBe e.blobs.map(_.toSet) } "Set in where clause" in { diff --git a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/ops/CassandraOpsSpec.scala b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/ops/CassandraOpsSpec.scala index 32ff4f568..af023dd76 100644 --- a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/ops/CassandraOpsSpec.scala +++ b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/ops/CassandraOpsSpec.scala @@ -53,7 +53,7 @@ class CassandraOpsSpec extends Spec { "options" - { "timestamp" in { inline def q = quote { - query[TestEntity].usingTimestamp(99).updateValue(lift(TestEntity("s", 1, 2L, None, true))) //hello + query[TestEntity].usingTimestamp(99).updateValue(lift(TestEntity("s", 1, 2L, None, true))) // hello } mirrorContext.run(q).string mustEqual "UPDATE TestEntity USING TIMESTAMP 99 SET s = ?, i = ?, l = ?, o = ?, b = ?" 
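For reference, the style applied mechanically throughout these test hunks, i.e. a wider column limit, call arguments broken out vertically once a line no longer fits, and `=` aligned across adjacent `val` definitions, is the kind of output scalafmt emits with settings along the following lines. This is a minimal sketch under assumed values; it is not necessarily the actual `.scalafmt.conf` added by this change, which is not shown in this part of the patch:

// hypothetical .scalafmt.conf sketch (HOCON); every value below is an assumption
version = "3.7.14"          // assumption: any recent scalafmt 3.x release
runner.dialect = scala3     // protoquill is a Scala 3 codebase
maxColumn = 120             // long run(...)/quote(...) calls above wrap near 120 columns
align.preset = more         // aligns `=` across adjacent val definitions, as in these hunks
docstrings.style = Asterisk // rewraps scaladoc bodies behind a leading asterisk
docstrings.wrap = yes       // reflows long scaladoc lines, as in the comment hunks

If the committed settings differ, only the exact wrap and alignment points change; the hunks in this patch are whatever the committed `.scalafmt.conf` produces.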
diff --git a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/package.scala b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/package.scala index 6d0856b15..fb530a0f5 100644 --- a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/package.scala +++ b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/package.scala @@ -9,7 +9,7 @@ import io.getquill.Literal package object cassandra { - lazy val mirrorContext = new CassandraMirrorContext(Literal) with CassandraTestEntities + lazy val mirrorContext = new CassandraMirrorContext(Literal) with CassandraTestEntities lazy val capsMirrorContext = new CassandraMirrorContext(UpperCaseNonDefault) with CassandraTestEntities lazy val testSyncDB = new CassandraSyncContext(Literal, "testSyncDB") with CassandraTestEntities diff --git a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/udt/UdtEncodingMirrorContextSpec.scala b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/udt/UdtEncodingMirrorContextSpec.scala index 7d2e4fe86..1c0dd3c39 100644 --- a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/udt/UdtEncodingMirrorContextSpec.scala +++ b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/udt/UdtEncodingMirrorContextSpec.scala @@ -29,7 +29,11 @@ class UdtEncodingMirrorContextSpec extends UdtSpec { "Encode/decode UDT within entity" in { case class User(id: Int, name: Name, names: List[Name]) mirrorContext.run(query[User]).string mustBe "SELECT id, name, names FROM User" - mirrorContext.run(query[User] - .insertValue(lift(User(1, Name("1", None), Nil)))).string mustBe "INSERT INTO User (id,name,names) VALUES (?, ?, ?)" + mirrorContext + .run( + query[User] + .insertValue(lift(User(1, Name("1", None), Nil))) + ) + .string mustBe "INSERT INTO User (id,name,names) VALUES (?, ?, ?)" } } diff --git a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/udt/UdtEncodingSessionContextSpec.scala b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/udt/UdtEncodingSessionContextSpec.scala index 934715729..d7d605170 100644 --- a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/udt/UdtEncodingSessionContextSpec.scala +++ b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/udt/UdtEncodingSessionContextSpec.scala @@ -10,12 +10,15 @@ import com.datastax.oss.driver.api.core.data.UdtValue import com.typesafe.config.ConfigValueFactory import io.getquill.context.cassandra.encoding.MapperSide - class UdtEncodingSessionContextSpec extends UdtSpec { val ctx1 = testSyncDB - val config0 = CassandraContextConfig(LoadConfig("testSyncDB").withValue("keyspace", ConfigValueFactory.fromAnyRef("system"))) - val config2 = CassandraContextConfig(LoadConfig("testSyncDB").withValue("keyspace", ConfigValueFactory.fromAnyRef("quill_test_2"))) + val config0 = CassandraContextConfig( + LoadConfig("testSyncDB").withValue("keyspace", ConfigValueFactory.fromAnyRef("system")) + ) + val config2 = CassandraContextConfig( + LoadConfig("testSyncDB").withValue("keyspace", ConfigValueFactory.fromAnyRef("quill_test_2")) + ) val ctx2 = new CassandraSyncContext(SnakeCase, config2) "Provide encoding for UDT" - { @@ -34,7 +37,7 @@ class UdtEncodingSessionContextSpec extends UdtSpec { } "nested" in { // Can use this to check if the mapper could be summoned - //implicitly[CassandraMapper[Name, UDTValue, MapperSide.Encode]] + // implicitly[CassandraMapper[Name, UDTValue, MapperSide.Encode]] implicitly[Decoder[Personal]] implicitly[Encoder[Personal]] 
implicitly[Decoder[List[Personal]]] @@ -59,14 +62,19 @@ class UdtEncodingSessionContextSpec extends UdtSpec { "without meta" in { case class WithEverything(id: Int, personal: Personal, nameList: List[Name]) - val e = WithEverything(1, Personal(1, "strt", - Name("first", Some("last")), - Some(Name("f", None)), - List("e"), - Set(1, 2), - Map(1 -> "1", 2 -> "2") + val e = WithEverything( + 1, + Personal( + 1, + "strt", + Name("first", Some("last")), + Some(Name("f", None)), + List("e"), + Set(1, 2), + Map(1 -> "1", 2 -> "2") ), - List(Name("first", None))) + List(Name("first", None)) + ) ctx1.run(query[WithEverything].insertValue(lift(e))) ctx1.run(query[WithEverything].filter(_.id == 1)).headOption must contain(e) } @@ -129,7 +137,6 @@ class UdtEncodingSessionContextSpec extends UdtSpec { () } - override protected def afterAll(): Unit = { + override protected def afterAll(): Unit = ctx2.close() - } } diff --git a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/udt/UdtSpec.scala b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/udt/UdtSpec.scala index 3a94b0db7..50d1fd4d0 100644 --- a/quill-cassandra/src/test/scala/io/getquill/context/cassandra/udt/UdtSpec.scala +++ b/quill-cassandra/src/test/scala/io/getquill/context/cassandra/udt/UdtSpec.scala @@ -6,12 +6,12 @@ import io.getquill.Udt trait UdtSpec extends Spec { case class Name(firstName: String, lastName: Option[String]) extends Udt case class Personal( - number: Int, - street: String, - name: Name, + number: Int, + street: String, + name: Name, optName: Option[Name], - list: List[String], - sets: Set[Int], - map: Map[Int, String] + list: List[String], + sets: Set[Int], + map: Map[Int, String] ) extends Udt } diff --git a/quill-doobie/src/main/scala/io/getquill/doobie/DoobieContext.scala b/quill-doobie/src/main/scala/io/getquill/doobie/DoobieContext.scala index 615aa7027..edc564577 100644 --- a/quill-doobie/src/main/scala/io/getquill/doobie/DoobieContext.scala +++ b/quill-doobie/src/main/scala/io/getquill/doobie/DoobieContext.scala @@ -5,40 +5,38 @@ import io.getquill.context.jdbc._ object DoobieContext { - class H2[+N <: NamingStrategy](val naming: N) - extends DoobieContextBase[H2Dialect, N] - with H2JdbcTypes[H2Dialect, N] { - val idiom: H2Dialect = H2Dialect + class H2[+N <: NamingStrategy](val naming: N) extends DoobieContextBase[H2Dialect, N] with H2JdbcTypes[H2Dialect, N] { + val idiom: H2Dialect = H2Dialect } class MySQL[+N <: NamingStrategy](val naming: N) - extends DoobieContextBase[MySQLDialect, N] - with MysqlJdbcTypes[MySQLDialect, N] { - val idiom: MySQLDialect = MySQLDialect + extends DoobieContextBase[MySQLDialect, N] + with MysqlJdbcTypes[MySQLDialect, N] { + val idiom: MySQLDialect = MySQLDialect } class Oracle[+N <: NamingStrategy](val naming: N) - extends DoobieContextBase[OracleDialect, N] - with OracleJdbcTypes[OracleDialect, N] { - val idiom: OracleDialect = OracleDialect + extends DoobieContextBase[OracleDialect, N] + with OracleJdbcTypes[OracleDialect, N] { + val idiom: OracleDialect = OracleDialect } class Postgres[+N <: NamingStrategy](val naming: N) - extends DoobieContextBase[PostgresDialect, N] - with PostgresJdbcTypes[PostgresDialect, N] { - val idiom: PostgresDialect = PostgresDialect + extends DoobieContextBase[PostgresDialect, N] + with PostgresJdbcTypes[PostgresDialect, N] { + val idiom: PostgresDialect = PostgresDialect } class SQLite[+N <: NamingStrategy](val naming: N) - extends DoobieContextBase[SqliteDialect, N] - with SqliteJdbcTypes[SqliteDialect, N] { - val idiom: 
SqliteDialect = SqliteDialect + extends DoobieContextBase[SqliteDialect, N] + with SqliteJdbcTypes[SqliteDialect, N] { + val idiom: SqliteDialect = SqliteDialect } class SQLServer[+N <: NamingStrategy](val naming: N) - extends DoobieContextBase[SQLServerDialect, N] - with SqlServerJdbcTypes[SQLServerDialect, N] { - val idiom: SQLServerDialect = SQLServerDialect + extends DoobieContextBase[SQLServerDialect, N] + with SqlServerJdbcTypes[SQLServerDialect, N] { + val idiom: SQLServerDialect = SQLServerDialect } } diff --git a/quill-doobie/src/main/scala/io/getquill/doobie/DoobieContextBase.scala b/quill-doobie/src/main/scala/io/getquill/doobie/DoobieContextBase.scala index 20809b17e..21a0c7567 100644 --- a/quill-doobie/src/main/scala/io/getquill/doobie/DoobieContextBase.scala +++ b/quill-doobie/src/main/scala/io/getquill/doobie/DoobieContextBase.scala @@ -28,17 +28,17 @@ import scala.annotation.targetName /** Base trait from which vendor-specific variants are derived. */ trait DoobieContextBase[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] - extends JdbcContextTypes[Dialect, Naming] + extends JdbcContextTypes[Dialect, Naming] with ProtoContextSecundus[Dialect, Naming] with ContextVerbStream[Dialect, Naming] { - override type Result[A] = ConnectionIO[A] - override type RunQueryResult[A] = List[A] - override type RunQuerySingleResult[A] = A - override type StreamResult[A] = Stream[ConnectionIO, A] - override type RunActionResult = Long - override type RunActionReturningResult[A] = A - override type RunBatchActionResult = List[Long] + override type Result[A] = ConnectionIO[A] + override type RunQueryResult[A] = List[A] + override type RunQuerySingleResult[A] = A + override type StreamResult[A] = Stream[ConnectionIO, A] + override type RunActionResult = Long + override type RunActionReturningResult[A] = A + override type RunBatchActionResult = List[Long] override type RunBatchActionReturningResult[A] = List[A] override type Runner = Unit @@ -47,23 +47,34 @@ trait DoobieContextBase[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] @targetName("runQueryDefault") inline def run[T](inline quoted: Quoted[Query[T]]): ConnectionIO[List[T]] = InternalApi.runQueryDefault(quoted) @targetName("runQuery") - inline def run[T](inline quoted: Quoted[Query[T]], inline wrap: OuterSelectWrap): ConnectionIO[List[T]] = InternalApi.runQuery(quoted, wrap) + inline def run[T](inline quoted: Quoted[Query[T]], inline wrap: OuterSelectWrap): ConnectionIO[List[T]] = + InternalApi.runQuery(quoted, wrap) @targetName("runQuerySingle") inline def run[T](inline quoted: Quoted[T]): ConnectionIO[T] = InternalApi.runQuerySingle(quoted) @targetName("runAction") inline def run[E](inline quoted: Quoted[Action[E]]): ConnectionIO[Long] = InternalApi.runAction(quoted) @targetName("runActionReturning") - inline def run[E, T](inline quoted: Quoted[ActionReturning[E, T]]): ConnectionIO[T] = InternalApi.runActionReturning[E, T](quoted) + inline def run[E, T](inline quoted: Quoted[ActionReturning[E, T]]): ConnectionIO[T] = + InternalApi.runActionReturning[E, T](quoted) @targetName("runActionReturningMany") - inline def run[E, T](inline quoted: Quoted[ActionReturning[E, List[T]]]): ConnectionIO[List[T]] = InternalApi.runActionReturningMany[E, T](quoted) + inline def run[E, T](inline quoted: Quoted[ActionReturning[E, List[T]]]): ConnectionIO[List[T]] = + InternalApi.runActionReturningMany[E, T](quoted) @targetName("runBatchAction") - inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]], rowsPerBatch: Int): 
ConnectionIO[List[Long]] = InternalApi.runBatchAction(quoted, rowsPerBatch) + inline def run[I, A <: Action[I] & QAC[I, Nothing]]( + inline quoted: Quoted[BatchAction[A]], + rowsPerBatch: Int + ): ConnectionIO[List[Long]] = InternalApi.runBatchAction(quoted, rowsPerBatch) @targetName("runBatchActionDefault") - inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): ConnectionIO[List[Long]] = InternalApi.runBatchAction(quoted, 1) + inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): ConnectionIO[List[Long]] = + InternalApi.runBatchAction(quoted, 1) @targetName("runBatchActionReturning") - inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]], rowsPerBatch: Int): ConnectionIO[List[T]] = InternalApi.runBatchActionReturning(quoted, rowsPerBatch) + inline def run[I, T, A <: Action[I] & QAC[I, T]]( + inline quoted: Quoted[BatchAction[A]], + rowsPerBatch: Int + ): ConnectionIO[List[T]] = InternalApi.runBatchActionReturning(quoted, rowsPerBatch) @targetName("runBatchActionReturningDefault") - inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]]): ConnectionIO[List[T]] = InternalApi.runBatchActionReturning(quoted, 1) + inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]]): ConnectionIO[List[T]] = + InternalApi.runBatchActionReturning(quoted, 1) // Logging behavior should be identical to JdbcContextBase.scala, which includes a couple calls // to log.underlying below. @@ -74,9 +85,9 @@ trait DoobieContextBase[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] private def prepareAndLog( sql: String, - p: Prepare, - )( - implicit connection: Connection + p: Prepare + )(implicit + connection: Connection ): PreparedStatementIO[Unit] = FPS.raw(p(_, connection)).flatMap { case (params, _) => FPS.delay(log.logQuery(sql, params)) } @@ -84,7 +95,7 @@ trait DoobieContextBase[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] override def executeQuery[A]( sql: String, prepare: Prepare = identityPrepare, - extractor: Extractor[A] = identityExtractor, + extractor: Extractor[A] = identityExtractor )( info: ExecutionInfo, dc: Runner @@ -101,7 +112,7 @@ trait DoobieContextBase[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] override def executeQuerySingle[A]( sql: String, prepare: Prepare = identityPrepare, - extractor: Extractor[A] = identityExtractor, + extractor: Extractor[A] = identityExtractor )( info: ExecutionInfo, dc: Runner @@ -119,7 +130,7 @@ trait DoobieContextBase[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] fetchSize: Option[Int], sql: String, prepare: Prepare = identityPrepare, - extractor: Extractor[A] = identityExtractor, + extractor: Extractor[A] = identityExtractor )( info: ExecutionInfo, dc: Runner @@ -130,14 +141,14 @@ trait DoobieContextBase[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] HC.stream( sql, prepareAndLog(sql, prepare)(connection), - fetchSize.getOrElse(DefaultChunkSize), + fetchSize.getOrElse(DefaultChunkSize) )(extractorToRead(extractor)(connection)) } yield result override def executeAction( sql: String, - prepare: Prepare = identityPrepare, - )(info: ExecutionInfo, dc: Runner): ConnectionIO[Long] = + prepare: Prepare = identityPrepare + )(info: ExecutionInfo, dc: Runner): ConnectionIO[Long] = HC.prepareStatement(sql) { useConnection { implicit connection => prepareAndLog(sql, prepare) *> @@ -157,10 +168,10 @@ trait DoobieContextBase[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] sql: String, prepare: 
Prepare = identityPrepare, extractor: Extractor[A], - returningBehavior: ReturnAction, + returningBehavior: ReturnAction )( info: ExecutionInfo, - dc: Runner, + dc: Runner ): ConnectionIO[A] = executeActionReturningMany[A](sql, prepare, extractor, returningBehavior)(info, dc).map(handleSingleResult(sql, _)) @@ -168,10 +179,10 @@ trait DoobieContextBase[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] sql: String, prepare: Prepare = identityPrepare, extractor: Extractor[A], - returningBehavior: ReturnAction, + returningBehavior: ReturnAction )( info: ExecutionInfo, - dc: Runner, + dc: Runner ): ConnectionIO[List[A]] = prepareConnections[List[A]](returningBehavior)(sql) { useConnection { implicit connection => @@ -183,9 +194,9 @@ trait DoobieContextBase[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] private def prepareBatchAndLog( sql: String, - p: Prepare, - )( - implicit connection: Connection + p: Prepare + )(implicit + connection: Connection ): PreparedStatementIO[Unit] = FPS.raw(p(_, connection)) flatMap { case (params, _) => FPS.delay(log.logBatchItem(sql, params)) @@ -210,30 +221,29 @@ trait DoobieContextBase[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] override def executeBatchActionReturning[A]( groups: List[BatchGroupReturning], - extractor: Extractor[A], + extractor: Extractor[A] )( info: ExecutionInfo, dc: Runner - ): ConnectionIO[List[A]] = groups.flatTraverse { - case BatchGroupReturning(sql, returningBehavior, preps) => - prepareConnections(returningBehavior)(sql) { - - useConnection { implicit connection => - for { - _ <- FPS.delay(log.underlying.debug("Batch: {}", sql)) - _ <- preps.traverse(prepareBatchAndLog(sql, _) *> FPS.addBatch) - _ <- HPS.executeBatch - r <- HPS.getGeneratedKeys(HRS.list(extractor)) - } yield r - } + ): ConnectionIO[List[A]] = groups.flatTraverse { case BatchGroupReturning(sql, returningBehavior, preps) => + prepareConnections(returningBehavior)(sql) { + + useConnection { implicit connection => + for { + _ <- FPS.delay(log.underlying.debug("Batch: {}", sql)) + _ <- preps.traverse(prepareBatchAndLog(sql, _) *> FPS.addBatch) + _ <- HPS.executeBatch + r <- HPS.getGeneratedKeys(HRS.list(extractor)) + } yield r } + } } // Turn an extractor into a `Read` so we can use the existing resultset. private implicit def extractorToRead[A]( ex: Extractor[A] - )( - implicit connection: Connection + )(implicit + connection: Connection ): Read[A] = new Read[A](Nil, (rs, _) => ex(rs, connection)) // Nothing to do here. @@ -247,7 +257,7 @@ trait DoobieContextBase[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] protected val effect = null - def wrap[T](t: => T): Free[ConnectionOp, T] = Free.pure(t) + def wrap[T](t: => T): Free[ConnectionOp, T] = Free.pure(t) def push[A, B](result: Free[ConnectionOp, A])(f: A => B): Free[ConnectionOp, B] = result.map(f(_)) def seq[A](list: List[Free[ConnectionOp, A]]): Free[ConnectionOp, List[A]] = list.sequence[[L] =>> Free[ConnectionOp, L], A] diff --git a/quill-doobie/src/test/scala/io/getquill/doobie/PeopleDoobieReturningSpec.scala b/quill-doobie/src/test/scala/io/getquill/doobie/PeopleDoobieReturningSpec.scala index c14478d28..51e43ea95 100644 --- a/quill-doobie/src/test/scala/io/getquill/doobie/PeopleDoobieReturningSpec.scala +++ b/quill-doobie/src/test/scala/io/getquill/doobie/PeopleDoobieReturningSpec.scala @@ -19,8 +19,7 @@ class PeopleDoobieReturningSpec extends PeopleReturningSpec { import cats.effect.unsafe.implicits.global // A transactor that always rolls back. 
- lazy val xa = Transactor - .after + lazy val xa = Transactor.after .set( Transactor.fromDriverManager[IO]( "org.postgresql.Driver", @@ -31,7 +30,7 @@ class PeopleDoobieReturningSpec extends PeopleReturningSpec { HC.commit ) - val testContext = new DoobieContext.Postgres(Literal) + val testContext = new DoobieContext.Postgres(Literal) val context: testContext.type = testContext import testContext._ @@ -46,7 +45,7 @@ class PeopleDoobieReturningSpec extends PeopleReturningSpec { import `Ex 0 insert.returning(_.generatedColumn) mod`._ val (id, output) = (for { - id <- testContext.run(op) + id <- testContext.run(op) output <- testContext.run(get) } yield (id, output)).transact(xa).unsafeRunSync() @@ -58,7 +57,7 @@ class PeopleDoobieReturningSpec extends PeopleReturningSpec { val (product, output) = (for { product <- testContext.run(op) - output <- testContext.run(get) + output <- testContext.run(get) } yield (product, output)).transact(xa).unsafeRunSync() output mustEqual result(product) @@ -68,7 +67,7 @@ class PeopleDoobieReturningSpec extends PeopleReturningSpec { import `Ex 1 insert.returningMany(_.generatedColumn) mod`._ val (id, output) = (for { - id <- testContext.run(op) + id <- testContext.run(op) output <- testContext.run(get) } yield (id, output)).transact(xa).unsafeRunSync() @@ -80,8 +79,8 @@ class PeopleDoobieReturningSpec extends PeopleReturningSpec { val output = (for { opResult <- testContext.run(op) - _ = opResult.toSet mustEqual expect.toSet - output <- testContext.run(get) + _ = opResult.toSet mustEqual expect.toSet + output <- testContext.run(get) } yield output).transact(xa).unsafeRunSync() output.toSet mustEqual result.toSet @@ -92,8 +91,8 @@ class PeopleDoobieReturningSpec extends PeopleReturningSpec { val output = (for { opResult <- testContext.run(op) - _ = opResult.toSet mustEqual expect.toSet - output <- testContext.run(get) + _ = opResult.toSet mustEqual expect.toSet + output <- testContext.run(get) } yield output).transact(xa).unsafeRunSync() output.toSet mustEqual result.toSet @@ -104,8 +103,8 @@ class PeopleDoobieReturningSpec extends PeopleReturningSpec { val output = (for { opResult <- testContext.run(op) - _ = opResult.toSet mustEqual expect.toSet - output <- testContext.run(get) + _ = opResult.toSet mustEqual expect.toSet + output <- testContext.run(get) } yield output).transact(xa).unsafeRunSync() output.toSet mustEqual result.toSet diff --git a/quill-doobie/src/test/scala/io/getquill/doobie/PostgresDoobieContextSuite.scala b/quill-doobie/src/test/scala/io/getquill/doobie/PostgresDoobieContextSuite.scala index 314b9e54a..7e108fe4a 100644 --- a/quill-doobie/src/test/scala/io/getquill/doobie/PostgresDoobieContextSuite.scala +++ b/quill-doobie/src/test/scala/io/getquill/doobie/PostgresDoobieContextSuite.scala @@ -17,8 +17,7 @@ class PostgresDoobieContextSuite extends AnyFreeSpec with Matchers { import cats.effect.unsafe.implicits.global // A transactor that always rolls back. 
- lazy val xa = Transactor - .after + lazy val xa = Transactor.after .set( Transactor.fromDriverManager[IO]( "org.postgresql.Driver", @@ -26,7 +25,7 @@ class PostgresDoobieContextSuite extends AnyFreeSpec with Matchers { "postgres", System.getenv("POSTGRES_PASSWORD") ), - HC.rollback, + HC.rollback ) val dc = new DoobieContext.Postgres[Literal](Literal) @@ -70,7 +69,7 @@ class PostgresDoobieContextSuite extends AnyFreeSpec with Matchers { query[Country].filter(_.name like pat).update(_.name -> "foo") } } - val actual = dc.run(stmt).transact(xa).unsafeRunSync() + val actual = dc.run(stmt).transact(xa).unsafeRunSync() val expected = List(2L, 1L) actual mustEqual expected } @@ -103,4 +102,4 @@ class PostgresDoobieContextSuite extends AnyFreeSpec with Matchers { // actual mustEqual expected // } -} \ No newline at end of file +} diff --git a/quill-doobie/src/test/scala/io/getquill/doobie/issue/Issue1067.scala b/quill-doobie/src/test/scala/io/getquill/doobie/issue/Issue1067.scala index 243ac240d..c5d25262a 100644 --- a/quill-doobie/src/test/scala/io/getquill/doobie/issue/Issue1067.scala +++ b/quill-doobie/src/test/scala/io/getquill/doobie/issue/Issue1067.scala @@ -26,10 +26,10 @@ class Issue1067 extends AnyFreeSpec with Matchers { case class Country(name: String, indepYear: Option[Short]) "Issue1067 - correctly select many countries, with a null in last position" in { - val stmt = quote(query[Country]) + val stmt = quote(query[Country]) val actual = dc.run(stmt).transact(xa).unsafeRunSync() actual.count(_.indepYear.isDefined) mustEqual 3 actual.count(_.indepYear.isEmpty) mustEqual 1 } -} \ No newline at end of file +} diff --git a/quill-jdbc-zio/src/main/scala/io/getquill/ZioJdbcContexts.scala b/quill-jdbc-zio/src/main/scala/io/getquill/ZioJdbcContexts.scala index 209b4fc7f..4da69ac92 100644 --- a/quill-jdbc-zio/src/main/scala/io/getquill/ZioJdbcContexts.scala +++ b/quill-jdbc-zio/src/main/scala/io/getquill/ZioJdbcContexts.scala @@ -1,34 +1,42 @@ package io.getquill import com.typesafe.config.Config -import io.getquill.context.jdbc.{ H2JdbcTypes, MysqlJdbcTypes, OracleJdbcTypes, PostgresJdbcTypes, SqlServerExecuteOverride, SqlServerJdbcTypes, SqliteJdbcTypes } +import io.getquill.context.jdbc.{ + H2JdbcTypes, + MysqlJdbcTypes, + OracleJdbcTypes, + PostgresJdbcTypes, + SqlServerExecuteOverride, + SqlServerJdbcTypes, + SqliteJdbcTypes +} import io.getquill.context.sql.idiom.SqlIdiom -import io.getquill.context.qzio.{ ZioJdbcContext, ZioJdbcUnderlyingContext } +import io.getquill.context.qzio.{ZioJdbcContext, ZioJdbcUnderlyingContext} import io.getquill.util.LoadConfig import javax.sql.DataSource import io.getquill.context.json.PostgresJsonExtensions class PostgresZioJdbcContext[+N <: NamingStrategy](val naming: N) - extends ZioJdbcContext[PostgresDialect, N] - with PostgresJdbcTypes[PostgresDialect, N] - with PostgresJsonExtensions { + extends ZioJdbcContext[PostgresDialect, N] + with PostgresJdbcTypes[PostgresDialect, N] + with PostgresJsonExtensions { val idiom: PostgresDialect = PostgresDialect val connDelegate: ZioJdbcUnderlyingContext[PostgresDialect, N] = new PostgresZioJdbcContext.Underlying[N](naming) } object PostgresZioJdbcContext { class Underlying[+N <: NamingStrategy](val naming: N) - extends ZioJdbcUnderlyingContext[PostgresDialect, N] - with PostgresJdbcTypes[PostgresDialect, N] - with PostgresJsonExtensions { + extends ZioJdbcUnderlyingContext[PostgresDialect, N] + with PostgresJdbcTypes[PostgresDialect, N] + with PostgresJsonExtensions { val idiom: PostgresDialect = 
PostgresDialect } } class SqlServerZioJdbcContext[+N <: NamingStrategy](val naming: N) - extends ZioJdbcContext[SQLServerDialect, N] - with SqlServerJdbcTypes[SQLServerDialect, N] { + extends ZioJdbcContext[SQLServerDialect, N] + with SqlServerJdbcTypes[SQLServerDialect, N] { val idiom: SQLServerDialect = SQLServerDialect val connDelegate: ZioJdbcUnderlyingContext[SQLServerDialect, N] = new SqlServerZioJdbcContext.Underlying[N](naming) @@ -36,69 +44,69 @@ class SqlServerZioJdbcContext[+N <: NamingStrategy](val naming: N) object SqlServerZioJdbcContext { class Underlying[+N <: NamingStrategy](val naming: N) - extends ZioJdbcUnderlyingContext[SQLServerDialect, N] - with SqlServerJdbcTypes[SQLServerDialect, N] - with SqlServerExecuteOverride[N] { + extends ZioJdbcUnderlyingContext[SQLServerDialect, N] + with SqlServerJdbcTypes[SQLServerDialect, N] + with SqlServerExecuteOverride[N] { val idiom: SQLServerDialect = SQLServerDialect } } class H2ZioJdbcContext[+N <: NamingStrategy](val naming: N) - extends ZioJdbcContext[H2Dialect, N] - with H2JdbcTypes[H2Dialect, N] { + extends ZioJdbcContext[H2Dialect, N] + with H2JdbcTypes[H2Dialect, N] { val idiom: H2Dialect = H2Dialect val connDelegate: ZioJdbcUnderlyingContext[H2Dialect, N] = new H2ZioJdbcContext.Underlying[N](naming) } object H2ZioJdbcContext { class Underlying[+N <: NamingStrategy](val naming: N) - extends ZioJdbcUnderlyingContext[H2Dialect, N] - with H2JdbcTypes[H2Dialect, N] { + extends ZioJdbcUnderlyingContext[H2Dialect, N] + with H2JdbcTypes[H2Dialect, N] { val idiom: H2Dialect = H2Dialect } } class MysqlZioJdbcContext[+N <: NamingStrategy](val naming: N) - extends ZioJdbcContext[MySQLDialect, N] - with MysqlJdbcTypes[MySQLDialect, N] { + extends ZioJdbcContext[MySQLDialect, N] + with MysqlJdbcTypes[MySQLDialect, N] { val idiom: MySQLDialect = MySQLDialect val connDelegate: ZioJdbcUnderlyingContext[MySQLDialect, N] = new MysqlZioJdbcContext.Underlying[N](naming) } object MysqlZioJdbcContext { class Underlying[+N <: NamingStrategy](val naming: N) - extends ZioJdbcUnderlyingContext[MySQLDialect, N] - with MysqlJdbcTypes[MySQLDialect, N] { + extends ZioJdbcUnderlyingContext[MySQLDialect, N] + with MysqlJdbcTypes[MySQLDialect, N] { val idiom: MySQLDialect = MySQLDialect } } class SqliteZioJdbcContext[+N <: NamingStrategy](val naming: N) - extends ZioJdbcContext[SqliteDialect, N] - with SqliteJdbcTypes[SqliteDialect, N] { + extends ZioJdbcContext[SqliteDialect, N] + with SqliteJdbcTypes[SqliteDialect, N] { val idiom: SqliteDialect = SqliteDialect val connDelegate: ZioJdbcUnderlyingContext[SqliteDialect, N] = new SqliteZioJdbcContext.Underlying[N](naming) } object SqliteZioJdbcContext { class Underlying[+N <: NamingStrategy](val naming: N) - extends ZioJdbcUnderlyingContext[SqliteDialect, N] - with SqliteJdbcTypes[SqliteDialect, N] { + extends ZioJdbcUnderlyingContext[SqliteDialect, N] + with SqliteJdbcTypes[SqliteDialect, N] { val idiom: SqliteDialect = SqliteDialect } } class OracleZioJdbcContext[+N <: NamingStrategy](val naming: N) - extends ZioJdbcContext[OracleDialect, N] - with OracleJdbcTypes[OracleDialect, N] { + extends ZioJdbcContext[OracleDialect, N] + with OracleJdbcTypes[OracleDialect, N] { val idiom: OracleDialect = OracleDialect val connDelegate: ZioJdbcUnderlyingContext[OracleDialect, N] = new OracleZioJdbcContext.Underlying[N](naming) } object OracleZioJdbcContext { class Underlying[+N <: NamingStrategy](val naming: N) - extends ZioJdbcUnderlyingContext[OracleDialect, N] - with OracleJdbcTypes[OracleDialect, N] { + extends 
ZioJdbcUnderlyingContext[OracleDialect, N]
+      with OracleJdbcTypes[OracleDialect, N] {
     val idiom: OracleDialect = OracleDialect
   }
 }
diff --git a/quill-jdbc-zio/src/main/scala/io/getquill/context/ZioJdbc.scala b/quill-jdbc-zio/src/main/scala/io/getquill/context/ZioJdbc.scala
index 75acea91c..1a64f0494 100644
--- a/quill-jdbc-zio/src/main/scala/io/getquill/context/ZioJdbc.scala
+++ b/quill-jdbc-zio/src/main/scala/io/getquill/context/ZioJdbc.scala
@@ -15,10 +15,10 @@ import javax.sql.DataSource
 import zio.Scope
 object ZioJdbc {
-  type QIO[T] = ZIO[DataSource, SQLException, T]
+  type QIO[T]     = ZIO[DataSource, SQLException, T]
   type QStream[T] = ZStream[DataSource, SQLException, T]
-  type QCIO[T] = ZIO[Connection, SQLException, T]
+  type QCIO[T]     = ZIO[Connection, SQLException, T]
   type QCStream[T] = ZStream[Connection, SQLException, T]
   object QIO {
@@ -35,8 +35,11 @@ object ZioJdbc {
     ZLayer.scoped {
       for {
         blockingExecutor <- ZIO.blockingExecutor
-        ds <- ZIO.service[DataSource]
-        r <- ZioJdbc.scopedBestEffort(ZIO.attempt(ds.getConnection)).refineToOrDie[SQLException].onExecutor(blockingExecutor)
+        ds               <- ZIO.service[DataSource]
+        r <- ZioJdbc
+               .scopedBestEffort(ZIO.attempt(ds.getConnection))
+               .refineToOrDie[SQLException]
+               .onExecutor(blockingExecutor)
       } yield r
     }
@@ -65,11 +68,13 @@ object ZioJdbc {
     fromJdbcConfigClosable(JdbcContextConfig(LoadConfig(prefix)))
   @deprecated("Use Quill.DataSource.fromJdbcConfigClosable instead", "3.3.0")
-  def fromJdbcConfigClosable(jdbcContextConfig: => JdbcContextConfig): ZLayer[Any, Throwable, DataSource with Closeable] =
+  def fromJdbcConfigClosable(
+    jdbcContextConfig: => JdbcContextConfig
+  ): ZLayer[Any, Throwable, DataSource with Closeable] =
     ZLayer.scoped {
       for {
         conf <- ZIO.attempt(jdbcContextConfig)
-        ds <- scopedBestEffort(ZIO.attempt(conf.dataSource))
+        ds   <- scopedBestEffort(ZIO.attempt(conf.dataSource))
       } yield ds
     }
 }
@@ -92,8 +97,8 @@ object ZioJdbc {
     (for {
       r <- ZIO.environment[R]
       q <- qzio
-        .provideSomeLayer[DataSource](ZLayer.succeedEnvironment(r))
-        .provideEnvironment(ZEnvironment(implicitEnv.env))
+             .provideSomeLayer[DataSource](ZLayer.succeedEnvironment(r))
+             .provideEnvironment(ZEnvironment(implicitEnv.env))
     } yield q).refineToOrDie[SQLException]
 }
@@ -109,16 +114,19 @@ object ZioJdbc {
   def implicitDS(implicit implicitEnv: Implicit[DataSource]): ZIO[Any, SQLException, T] =
     (for {
       q <- qzio
-        .provideSomeLayer(Quill.Connection.acquireScoped)
-        .provideEnvironment(ZEnvironment(implicitEnv.env))
+             .provideSomeLayer(Quill.Connection.acquireScoped)
+             .provideEnvironment(ZEnvironment(implicitEnv.env))
     } yield q).refineToOrDie[SQLException]
 }
 implicit class QuillZioExt[T, R](qzio: ZIO[Connection with R, Throwable, T])(implicit tag: Tag[R]) {
+
   /**
-   * Change `Connection` of a QIO to `DataSource with Closeable` by providing a `DataSourceLayer.live` instance
-   * which will grab a connection from the data-source, perform the QIO operation, and the immediately release the connection.
-   * This is used for data-sources that have pooled connections e.g. Hikari.
+   * Change `Connection` of a QIO to `DataSource with Closeable` by providing
+   * a `DataSourceLayer.live` instance which will grab a connection from the
+   * data-source, perform the QIO operation, and then immediately release the
+   * connection. This is used for data-sources that have pooled connections,
+   * e.g. Hikari.
    * {{{
    *   def ds: DataSource with Closeable = ... 
* run(query[Person]).onDataSource.provide(Has(ds)) @@ -128,22 +136,28 @@ object ZioJdbc { (for { r <- ZIO.environment[R] q <- qzio - .provideSomeLayer[Connection](ZLayer.succeedEnvironment(r)) - .provideSomeLayer(Quill.Connection.acquireScoped) + .provideSomeLayer[Connection](ZLayer.succeedEnvironment(r)) + .provideSomeLayer(Quill.Connection.acquireScoped) } yield q).refineToOrDie[SQLException] } /** - * This is the same as `ZIO.fromAutoCloseable` but if the `.close()` fails it will log `"close() of resource failed"` - * and continue instead of immediately throwing an error in the ZIO die-channel. That is because for JDBC purposes, - * a failure on the connection close is usually a recoverable failure. In the cases where it happens it occurs - * as the byproduct of a bad state (e.g. failing to close a transaction before closing the connection or failing to - * release a stale connection) which will eventually cause other operations (i.e. future reads/writes) to fail - * that have not occurred yet. + * This is the same as `ZIO.fromAutoCloseable` but if the `.close()` fails it + * will log `"close() of resource failed"` and continue instead of immediately + * throwing an error in the ZIO die-channel. That is because for JDBC + * purposes, a failure on the connection close is usually a recoverable + * failure. When it happens, it occurs as the byproduct of a bad + * state (e.g. failing to close a transaction before closing the connection or + * failing to release a stale connection) which will eventually cause other + * operations (i.e. future reads/writes) that have not occurred yet to fail. */ private[getquill] def scopedBestEffort[R, E, A <: AutoCloseable](effect: ZIO[R, E, A]): ZIO[R with Scope, E, A] = ZIO.acquireRelease(effect)(resource => - ZIO.attemptBlocking(resource.close()).tapError(e => ZIO.attempt(logger.underlying.error(s"close() of resource failed", e)).ignore).ignore) + ZIO + .attemptBlocking(resource.close()) + .tapError(e => ZIO.attempt(logger.underlying.error(s"close() of resource failed", e)).ignore) + .ignore + ) private[getquill] val streamBlocker: ZStream[Any, Nothing, Any] = ZStream.scoped { diff --git a/quill-jdbc-zio/src/main/scala/io/getquill/context/json/PostgresJsonExtensions.scala b/quill-jdbc-zio/src/main/scala/io/getquill/context/json/PostgresJsonExtensions.scala index 8e2ebc3b0..047fa4a75 100644 --- a/quill-jdbc-zio/src/main/scala/io/getquill/context/json/PostgresJsonExtensions.scala +++ b/quill-jdbc-zio/src/main/scala/io/getquill/context/json/PostgresJsonExtensions.scala @@ -1,12 +1,12 @@ package io.getquill.context.json -import io.getquill.context.jdbc.{ Decoders, Encoders, JdbcContextTypes } -import zio.json.{ JsonDecoder, JsonEncoder } +import io.getquill.context.jdbc.{Decoders, Encoders, JdbcContextTypes} +import zio.json.{JsonDecoder, JsonEncoder} import zio.json.ast.Json import java.sql.Types -import scala.reflect.{ ClassTag, classTag } -import io.getquill.{ JsonValue, JsonbValue } +import scala.reflect.{ClassTag, classTag} +import io.getquill.{JsonValue, JsonbValue} trait PostgresJsonExtensions extends Encoders with Decoders { this: JdbcContextTypes[_, _] => @@ -20,56 +20,68 @@ trait PostgresJsonExtensions extends Encoders with Decoders { implicit def jsonbEntityDecoder[T: ClassTag](implicit jsonDecoder: JsonDecoder[T]): Decoder[JsonbValue[T]] = entityDecoder[T, JsonbValue[T]](JsonbValue(_))("jsonb", jsonDecoder) - implicit def jsonAstEncoder: Encoder[JsonValue[Json]] = astEncoder(_.value.toString(), "json") - implicit def jsonAstDecoder:
Decoder[JsonValue[Json]] = astDecoder(JsonValue(_)) + implicit def jsonAstEncoder: Encoder[JsonValue[Json]] = astEncoder(_.value.toString(), "json") + implicit def jsonAstDecoder: Decoder[JsonValue[Json]] = astDecoder(JsonValue(_)) implicit def jsonbAstEncoder: Encoder[JsonbValue[Json]] = astEncoder(_.value.toString(), "jsonb") implicit def jsonbAstDecoder: Decoder[JsonbValue[Json]] = astDecoder(JsonbValue(_)) def astEncoder[Wrapper](valueToString: Wrapper => String, jsonType: String): Encoder[Wrapper] = - encoder(Types.VARCHAR, (index, jsonValue, row) => { - val obj = new org.postgresql.util.PGobject() - obj.setType(jsonType) - val jsonString = valueToString(jsonValue) - obj.setValue(jsonString) - row.setObject(index, obj) - }) + encoder( + Types.VARCHAR, + (index, jsonValue, row) => { + val obj = new org.postgresql.util.PGobject() + obj.setType(jsonType) + val jsonString = valueToString(jsonValue) + obj.setValue(jsonString) + row.setObject(index, obj) + } + ) def astDecoder[Wrapper](valueFromString: Json => Wrapper): Decoder[Wrapper] = - decoder((index, row, session) => { - val obj = row.getObject(index, classOf[org.postgresql.util.PGobject]) + decoder { (index, row, session) => + val obj = row.getObject(index, classOf[org.postgresql.util.PGobject]) val jsonString = obj.getValue Json.decoder.decodeJson(jsonString) match { case Right(value) => valueFromString(value) - case Left(error) => throw new IllegalArgumentException(s"Error decoding the Json value '${jsonString}' into a zio.json.ast.Json. Message: ${error}") + case Left(error) => + throw new IllegalArgumentException( + s"Error decoding the Json value '${jsonString}' into a zio.json.ast.Json. Message: ${error}" + ) } - }) + } def entityEncoder[JsValue, Wrapper]( unwrap: Wrapper => JsValue )( - jsonType: String, + jsonType: String, jsonEncoder: JsonEncoder[JsValue] ): Encoder[Wrapper] = - encoder(Types.VARCHAR, (index, jsonValue, row) => { - val obj = new org.postgresql.util.PGobject() - obj.setType(jsonType) - val jsonString = jsonEncoder.encodeJson(unwrap(jsonValue), None).toString - obj.setValue(jsonString) - row.setObject(index, obj) - }) + encoder( + Types.VARCHAR, + (index, jsonValue, row) => { + val obj = new org.postgresql.util.PGobject() + obj.setType(jsonType) + val jsonString = jsonEncoder.encodeJson(unwrap(jsonValue), None).toString + obj.setValue(jsonString) + row.setObject(index, obj) + } + ) def entityDecoder[JsValue: ClassTag, Wrapper]( wrap: JsValue => Wrapper )( - jsonType: String, + jsonType: String, jsonDecoder: JsonDecoder[JsValue] ): Decoder[Wrapper] = - decoder((index, row, session) => { - val obj = row.getObject(index, classOf[org.postgresql.util.PGobject]) + decoder { (index, row, session) => + val obj = row.getObject(index, classOf[org.postgresql.util.PGobject]) val jsonString = obj.getValue jsonDecoder.decodeJson(jsonString) match { case Right(value) => wrap(value) - case Left(error) => throw new IllegalArgumentException(s"Error decoding the Json value '${jsonString}' into a ${classTag[JsValue]}. Message: ${error}") + case Left(error) => + throw new IllegalArgumentException( + s"Error decoding the Json value '${jsonString}' into a ${classTag[JsValue]}. 
Message: ${error}" + ) } - }) + } } diff --git a/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ResultSetIterator.scala b/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ResultSetIterator.scala index ca32a45bb..e11957803 100644 --- a/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ResultSetIterator.scala +++ b/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ResultSetIterator.scala @@ -1,15 +1,17 @@ package io.getquill.context.qzio -import java.sql.{ Connection, ResultSet } +import java.sql.{Connection, ResultSet} /** - * In order to allow a ResultSet to be consumed by an Observable, a ResultSet iterator must be created. - * Since Quill provides a extractor for an individual ResultSet row, a single row can easily be cached - * in memory. This allows for a straightforward implementation of a hasNext method. + * In order to allow a ResultSet to be consumed by an Observable, a ResultSet + * iterator must be created. Since Quill provides an extractor for an individual + * ResultSet row, a single row can easily be cached in memory. This allows for a + * straightforward implementation of a hasNext method. */ -class ResultSetIterator[T](rs: ResultSet, conn: Connection, extractor: (ResultSet, Connection) => T) extends BufferedIterator[T] { +class ResultSetIterator[T](rs: ResultSet, conn: Connection, extractor: (ResultSet, Connection) => T) + extends BufferedIterator[T] { - private[this] var state = 0 // 0: no data, 1: cached, 2: finished + private[this] var state = 0 // 0: no data, 1: cached, 2: finished private[this] var cached: T = null.asInstanceOf[T] protected[this] final def finished(): T = { @@ -28,12 +30,11 @@ class ResultSetIterator[T](rs: ResultSet, conn: Connection, extractor: (ResultSe else throw new NoSuchElementException("head on empty iterator") } - private def prefetchIfNeeded(): Unit = { + private def prefetchIfNeeded(): Unit = if (state == 0) { cached = fetchNext() if (state == 0) state = 1 } - } def hasNext: Boolean = { prefetchIfNeeded() diff --git a/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ZioJdbcContext.scala b/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ZioJdbcContext.scala index 5a33c2f23..72d0ee65e 100644 --- a/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ZioJdbcContext.scala +++ b/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ZioJdbcContext.scala @@ -18,90 +18,111 @@ import zio.ZIO.attemptBlocking import zio.ZIO.blocking /** - * Quill context that executes JDBC queries inside of ZIO. Unlike most other contexts - * that require passing in a Data Source, this context takes in a java.sql.Connection - * as a resource dependency which can be provided later (see `ZioJdbc` for helper methods - * that assist in doing this). + * Quill context that executes JDBC queries inside of ZIO. Unlike most other + * contexts that require passing in a Data Source, this context takes in a + * java.sql.Connection as a resource dependency which can be provided later (see + * `ZioJdbc` for helper methods that assist in doing this). * - * The resource dependency itself is just a `Has[Connection]`. Since this is frequently used - * The type `QIO[T]` i.e. Quill-IO has been defined as an alias for `ZIO[Has[Connection], SQLException, T]`. + * The resource dependency itself is just a `Has[Connection]`. Since this is + * frequently used, the type `QIO[T]` (i.e. Quill-IO) has been defined as an alias + * for `ZIO[Has[Connection], SQLException, T]`. * - * Since in most JDBC use-cases, a connection-pool datasource i.e.
Hikari is used it would actually - * be much more useful to interact with `ZIO[Has[DataSource], SQLException, T]`. - * The extension method `.onDataSource` in `io.getquill.context.ZioJdbc.QuillZioExt` will perform this conversion - * (for even more brevity use `onDS` which is an alias for this method). - * {{ - * import ZioJdbc._ - * val zioDs = Quill.DataSource.fromPrefix("testPostgresDB") - * MyZioContext.run(query[Person]).onDataSource.provideCustomLayer(zioDS) - * }} + * Since in most JDBC use-cases a connection-pool datasource (i.e. Hikari) is + * used, it would actually be much more useful to interact with + * `ZIO[Has[DataSource], SQLException, T]`. The extension method `.onDataSource` + * in `io.getquill.context.ZioJdbc.QuillZioExt` will perform this conversion + * (for even more brevity use `onDS` which is an alias for this method). {{ + * import ZioJdbc._ val zioDs = Quill.DataSource.fromPrefix("testPostgresDB") + * MyZioContext.run(query[Person]).onDataSource.provideCustomLayer(zioDS) }} * - * If you are using a Plain Scala app however, you will need to manually run it e.g. using zio.Runtime - * {{ - * Runtime.default.unsafeRun(MyZioContext.run(query[Person]).ContextTranslateProtoprovideLayer(zioDS)) + * If you are using a plain Scala app however, you will need to run it manually, + * e.g. using zio.Runtime {{ + * Runtime.default.unsafeRun(MyZioContext.run(query[Person]).provideLayer(zioDS)) * }} * - * Note however that the one exception to these cases are the `prepare` methods where a `ZIO[Has[Connection], SQLException, PreparedStatement]` - * is being returned. In those situations the acquire-action-release pattern does not make any sense because the `PrepareStatement` - * is only held open while it's host-connection exists. + * Note however that the one exception to these cases is the `prepare` methods, + * where a `ZIO[Has[Connection], SQLException, PreparedStatement]` is being + * returned. In those situations the acquire-action-release pattern does not + * make any sense because the `PreparedStatement` is only held open while its + * host-connection exists.
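 + *
 + * For reference, a minimal end-to-end sketch of the pattern above (the
 + * `testPostgresDB` config prefix, the `Person` table, and the `MyZioContext`
 + * object are illustrative assumptions):
 + * {{{
 + * case class Person(name: String, age: Int)
 + * object MyZioContext extends PostgresZioJdbcContext(SnakeCase)
 + * import MyZioContext._
 + *
 + * // `run` returns an effect that still needs a DataSource from the environment
 + * val people: ZIO[DataSource, SQLException, List[Person]] = run(query[Person])
 + *
 + * // satisfy the dependency with a layer built from a config prefix
 + * val zioDS = Quill.DataSource.fromPrefix("testPostgresDB")
 + * val program: ZIO[Any, Throwable, List[Person]] = people.provideLayer(zioDS)
 + * }}}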
*/ -abstract class ZioJdbcContext[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] extends ZioContext[Dialect, Naming] - with JdbcContextTypes[Dialect, Naming] - with ProtoContextSecundus[Dialect, Naming] - with ContextVerbStream[Dialect, Naming] - with ZioPrepareContext[Dialect, Naming] - with ZioTranslateContext[Dialect, Naming] { - - override type StreamResult[T] = ZStream[Environment, Error, T] - override type Result[T] = ZIO[Environment, Error, T] - override type RunQueryResult[T] = List[T] - override type RunQuerySingleResult[T] = T - override type RunActionResult = Long - override type RunActionReturningResult[T] = T - override type RunBatchActionResult = List[Long] +abstract class ZioJdbcContext[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] + extends ZioContext[Dialect, Naming] + with JdbcContextTypes[Dialect, Naming] + with ProtoContextSecundus[Dialect, Naming] + with ContextVerbStream[Dialect, Naming] + with ZioPrepareContext[Dialect, Naming] + with ZioTranslateContext[Dialect, Naming] { + + override type StreamResult[T] = ZStream[Environment, Error, T] + override type Result[T] = ZIO[Environment, Error, T] + override type RunQueryResult[T] = List[T] + override type RunQuerySingleResult[T] = T + override type RunActionResult = Long + override type RunActionReturningResult[T] = T + override type RunBatchActionResult = List[Long] override type RunBatchActionReturningResult[T] = List[T] // Needed for TranslateContext in ProtoQuill - override type Runner = Unit + override type Runner = Unit override type TranslateRunner = Unit override protected def context: Runner = () - def translateContext: TranslateRunner = () + def translateContext: TranslateRunner = () - override type Error = SQLException + override type Error = SQLException override type Environment = DataSource - override type PrepareRow = PreparedStatement - override type ResultRow = ResultSet + override type PrepareRow = PreparedStatement + override type ResultRow = ResultSet - override type TranslateResult[T] = ZIO[Environment, Error, T] - override type PrepareQueryResult = QCIO[PrepareRow] - override type PrepareActionResult = QCIO[PrepareRow] + override type TranslateResult[T] = ZIO[Environment, Error, T] + override type PrepareQueryResult = QCIO[PrepareRow] + override type PrepareActionResult = QCIO[PrepareRow] override type PrepareBatchActionResult = QCIO[List[PrepareRow]] - override type Session = Connection + override type Session = Connection @targetName("runQueryDefault") - inline def run[T](inline quoted: Quoted[Query[T]]): ZIO[DataSource, SQLException, List[T]] = InternalApi.runQueryDefault(quoted) + inline def run[T](inline quoted: Quoted[Query[T]]): ZIO[DataSource, SQLException, List[T]] = + InternalApi.runQueryDefault(quoted) @targetName("runQuery") - inline def run[T](inline quoted: Quoted[Query[T]], inline wrap: OuterSelectWrap): ZIO[DataSource, SQLException, List[T]] = InternalApi.runQuery(quoted, wrap) + inline def run[T]( + inline quoted: Quoted[Query[T]], + inline wrap: OuterSelectWrap + ): ZIO[DataSource, SQLException, List[T]] = InternalApi.runQuery(quoted, wrap) @targetName("runQuerySingle") inline def run[T](inline quoted: Quoted[T]): ZIO[DataSource, SQLException, T] = InternalApi.runQuerySingle(quoted) @targetName("runAction") - inline def run[E](inline quoted: Quoted[Action[E]]): ZIO[DataSource, SQLException, Long] = InternalApi.runAction(quoted) + inline def run[E](inline quoted: Quoted[Action[E]]): ZIO[DataSource, SQLException, Long] = + InternalApi.runAction(quoted) 
@targetName("runActionReturning") - inline def run[E, T](inline quoted: Quoted[ActionReturning[E, T]]): ZIO[DataSource, SQLException, T] = InternalApi.runActionReturning[E, T](quoted) + inline def run[E, T](inline quoted: Quoted[ActionReturning[E, T]]): ZIO[DataSource, SQLException, T] = + InternalApi.runActionReturning[E, T](quoted) @targetName("runActionReturningMany") - inline def run[E, T](inline quoted: Quoted[ActionReturning[E, List[T]]]): ZIO[DataSource, SQLException, List[T]] = InternalApi.runActionReturningMany[E, T](quoted) + inline def run[E, T](inline quoted: Quoted[ActionReturning[E, List[T]]]): ZIO[DataSource, SQLException, List[T]] = + InternalApi.runActionReturningMany[E, T](quoted) @targetName("runBatchAction") - inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]], rowsPerBatch: Int): ZIO[DataSource, SQLException, List[Long]] = InternalApi.runBatchAction(quoted, rowsPerBatch) + inline def run[I, A <: Action[I] & QAC[I, Nothing]]( + inline quoted: Quoted[BatchAction[A]], + rowsPerBatch: Int + ): ZIO[DataSource, SQLException, List[Long]] = InternalApi.runBatchAction(quoted, rowsPerBatch) @targetName("runBatchActionDefault") - inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): ZIO[DataSource, SQLException, List[Long]] = InternalApi.runBatchAction(quoted, 1) + inline def run[I, A <: Action[I] & QAC[I, Nothing]]( + inline quoted: Quoted[BatchAction[A]] + ): ZIO[DataSource, SQLException, List[Long]] = InternalApi.runBatchAction(quoted, 1) @targetName("runBatchActionReturning") - inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]], rowsPerBatch: Int): ZIO[DataSource, SQLException, List[T]] = InternalApi.runBatchActionReturning(quoted, rowsPerBatch) + inline def run[I, T, A <: Action[I] & QAC[I, T]]( + inline quoted: Quoted[BatchAction[A]], + rowsPerBatch: Int + ): ZIO[DataSource, SQLException, List[T]] = InternalApi.runBatchActionReturning(quoted, rowsPerBatch) @targetName("runBatchActionReturningDefault") - inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]]): ZIO[DataSource, SQLException, List[T]] = InternalApi.runBatchActionReturning(quoted, 1) + inline def run[I, T, A <: Action[I] & QAC[I, T]]( + inline quoted: Quoted[BatchAction[A]] + ): ZIO[DataSource, SQLException, List[T]] = InternalApi.runBatchActionReturning(quoted, 1) /** - * Since we are immediately executing the ZIO that creates this fiber ref whether it is global is not really relevant since it does not really use scope - * However if it were used for something else it would be scoped to the fiber-ref of the zio-jdbc context's creator i.e. the global scope. + * Since we are immediately executing the ZIO that creates this fiber ref, + * whether it is global is not really relevant since it does not really use + * scope. However, if it were used for something else it would be scoped to the + * fiber-ref of the zio-jdbc context's creator, i.e. the global scope.
*/ val currentConnection: FiberRef[Option[Connection]] = Unsafe.unsafe { implicit unsafe => @@ -119,32 +140,77 @@ abstract class ZioJdbcContext[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] e def executeAction(sql: String, prepare: Prepare = identityPrepare)(info: ExecutionInfo, dc: Runner): QIO[Long] = onConnection(connDelegate.executeAction(sql, prepare)(info, dc)) - def executeQuery[T](sql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)(info: ExecutionInfo, dc: Runner): QIO[List[T]] = + def executeQuery[T](sql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)( + info: ExecutionInfo, + dc: Runner + ): QIO[List[T]] = onConnection(connDelegate.executeQuery[T](sql, prepare, extractor)(info, dc)) - override def executeQuerySingle[T](sql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)(info: ExecutionInfo, dc: Runner): QIO[T] = + override def executeQuerySingle[T]( + sql: String, + prepare: Prepare = identityPrepare, + extractor: Extractor[T] = identityExtractor + )(info: ExecutionInfo, dc: Runner): QIO[T] = onConnection(connDelegate.executeQuerySingle[T](sql, prepare, extractor)(info, dc)) - override def translateQueryEndpoint[T](statement: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor, prettyPrint: Boolean = false)(executionInfo: ExecutionInfo, dc: Runner): TranslateResult[String] = + override def translateQueryEndpoint[T]( + statement: String, + prepare: Prepare = identityPrepare, + extractor: Extractor[T] = identityExtractor, + prettyPrint: Boolean = false + )(executionInfo: ExecutionInfo, dc: Runner): TranslateResult[String] = onConnection(connDelegate.translateQueryEndpoint[T](statement, prepare, extractor, prettyPrint)(executionInfo, dc)) - override def translateBatchQueryEndpoint(groups: List[BatchGroup], prettyPrint: Boolean = false)(executionInfo: ExecutionInfo, dc: Runner): TranslateResult[List[String]] = - onConnection(connDelegate.translateBatchQueryEndpoint(groups.asInstanceOf[List[ZioJdbcContext.this.connDelegate.BatchGroup]], prettyPrint)(executionInfo, dc)) + override def translateBatchQueryEndpoint( + groups: List[BatchGroup], + prettyPrint: Boolean = false + )(executionInfo: ExecutionInfo, dc: Runner): TranslateResult[List[String]] = + onConnection( + connDelegate.translateBatchQueryEndpoint( + groups.asInstanceOf[List[ZioJdbcContext.this.connDelegate.BatchGroup]], + prettyPrint + )(executionInfo, dc) + ) - def streamQuery[T](fetchSize: Option[Int], sql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)(info: ExecutionInfo, dc: Runner): QStream[T] = + def streamQuery[T]( + fetchSize: Option[Int], + sql: String, + prepare: Prepare = identityPrepare, + extractor: Extractor[T] = identityExtractor + )(info: ExecutionInfo, dc: Runner): QStream[T] = onConnectionStream(connDelegate.streamQuery[T](fetchSize, sql, prepare, extractor)(info, dc)) - def executeActionReturning[O](sql: String, prepare: Prepare = identityPrepare, extractor: Extractor[O], returningBehavior: ReturnAction)(info: ExecutionInfo, dc: Runner): QIO[O] = + def executeActionReturning[O]( + sql: String, + prepare: Prepare = identityPrepare, + extractor: Extractor[O], + returningBehavior: ReturnAction + )(info: ExecutionInfo, dc: Runner): QIO[O] = onConnection(connDelegate.executeActionReturning[O](sql, prepare, extractor, returningBehavior)(info, dc)) - def executeActionReturningMany[O](sql: String, prepare: 
Prepare = identityPrepare, extractor: Extractor[O], returningBehavior: ReturnAction)(info: ExecutionInfo, dc: Runner): QIO[List[O]] = + def executeActionReturningMany[O]( + sql: String, + prepare: Prepare = identityPrepare, + extractor: Extractor[O], + returningBehavior: ReturnAction + )(info: ExecutionInfo, dc: Runner): QIO[List[O]] = onConnection(connDelegate.executeActionReturningMany[O](sql, prepare, extractor, returningBehavior)(info, dc)) def executeBatchAction(groups: List[BatchGroup])(info: ExecutionInfo, dc: Runner): QIO[List[Long]] = - onConnection(connDelegate.executeBatchAction(groups.asInstanceOf[List[ZioJdbcContext.this.connDelegate.BatchGroup]])(info, dc)) + onConnection( + connDelegate.executeBatchAction(groups.asInstanceOf[List[ZioJdbcContext.this.connDelegate.BatchGroup]])(info, dc) + ) - def executeBatchActionReturning[T](groups: List[BatchGroupReturning], extractor: Extractor[T])(info: ExecutionInfo, dc: Runner): QIO[List[T]] = - onConnection(connDelegate.executeBatchActionReturning[T](groups.asInstanceOf[List[ZioJdbcContext.this.connDelegate.BatchGroupReturning]], extractor)(info, dc)) + def executeBatchActionReturning[T]( + groups: List[BatchGroupReturning], + extractor: Extractor[T] + )(info: ExecutionInfo, dc: Runner): QIO[List[T]] = + onConnection( + connDelegate.executeBatchActionReturning[T]( + groups.asInstanceOf[List[ZioJdbcContext.this.connDelegate.BatchGroupReturning]], + extractor + )(info, dc) + ) override def prepareQuery(sql: String, prepare: Prepare)(info: ExecutionInfo, dc: Runner): QCIO[PreparedStatement] = connDelegate.prepareQuery(sql, prepare)(info, dc) @@ -152,34 +218,32 @@ abstract class ZioJdbcContext[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] e override def prepareAction(sql: String, prepare: Prepare)(info: ExecutionInfo, dc: Runner): QCIO[PreparedStatement] = connDelegate.prepareAction(sql, prepare)(info, dc) - override def prepareBatchAction(groups: List[BatchGroup])(info: ExecutionInfo, dc: Runner): QCIO[List[PreparedStatement]] = + override def prepareBatchAction( + groups: List[BatchGroup] + )(info: ExecutionInfo, dc: Runner): QCIO[List[PreparedStatement]] = connDelegate.prepareBatchAction(groups.asInstanceOf[List[ZioJdbcContext.this.connDelegate.BatchGroup]])(info, dc) private[getquill] def prepareParams(statement: String, prepare: Prepare): QIO[Seq[String]] = onConnection(connDelegate.prepareParams(statement, prepare)) /** - * Execute instructions in a transaction. For example, to add a Person row to the database and return - * the contents of the Person table immediately after that: + * Execute instructions in a transaction. For example, to add a Person row to + * the database and return the contents of the Person table immediately after + * that: * {{{ * val a = run(query[Person].insert(Person(...)): ZIO[Has[DataSource], SQLException, Long] * val b = run(query[Person]): ZIO[Has[DataSource], SQLException, Person] * transaction(a *> b): ZIO[Has[DataSource], SQLException, Person] * }}} * - * The order of operations run in the case that a new connection needs to be aquired are as follows: - *
-   *   getDS from env,
-   *   acquire-connection,
-   *     set-no-autocommit(connection),
-   *       put-into-fiberref(connection),
-   *         op - the corresponding execute_ method which will execute and pull connection from the fiberref,
-   *       remove-from-fiberref(connection),
-   *     set-prev-autocommit(connection),
-   *   release-conn
-   * 
+ * The order of operations run in the case that a new connection needs to be + * acquired is as follows:
 getDS from env, acquire-connection,
+   * set-no-autocommit(connection), put-into-fiberref(connection), op - the
+   * corresponding execute_ method which will execute and pull connection from
+   * the fiberref, remove-from-fiberref(connection),
+   * set-prev-autocommit(connection), release-conn 
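+   *
+   * For reference, a minimal usage sketch (the `Person` table is an illustrative
+   * assumption; note that `transaction` widens the error type to `Throwable`):
+   * {{{
+   * val insertAndFetch: ZIO[DataSource, Throwable, List[Person]] =
+   *   transaction {
+   *     for {
+   *       _      <- run(query[Person].insertValue(lift(Person("Joe", 123))))
+   *       people <- run(query[Person])
+   *     } yield people
+   *   }
+   * }}}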
*/ - def transaction[R <: DataSource, A](op: ZIO[R, Throwable, A]): ZIO[R, Throwable, A] = { + def transaction[R <: DataSource, A](op: ZIO[R, Throwable, A]): ZIO[R, Throwable, A] = blocking(currentConnection.get.flatMap { // We can just return the op in the case that there is already a connection set on the fiber ref // because the op is execute___ which will lookup the connection from the fiber ref via onConnection/onConnectionStream @@ -187,31 +251,30 @@ abstract class ZioJdbcContext[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] e case Some(connection) => op case None => val connection = for { - env <- ZIO.service[DataSource] + env <- ZIO.service[DataSource] connection <- scopedBestEffort(attemptBlocking(env.getConnection)) // Get the current value of auto-commit prevAutoCommit <- attemptBlocking(connection.getAutoCommit) // Disable auto-commit since we need to be able to roll back. Once everything is done, set it // to whatever the previous value was. _ <- ZIO.acquireRelease(attemptBlocking(connection.setAutoCommit(false))) { _ => - attemptBlocking(connection.setAutoCommit(prevAutoCommit)).orDie - } + attemptBlocking(connection.setAutoCommit(prevAutoCommit)).orDie + } _ <- ZIO.acquireRelease(currentConnection.set(Some(connection))) { _ => - // Note. We are failing the fiber if auto-commit reset fails. For some circumstances this may be too aggresive. - // If the connection pool e.g. Hikari resets this property for a recycled connection anyway doing it here - // might not be necessary - currentConnection.set(None) - } + // Note. We are failing the fiber if auto-commit reset fails. For some circumstances this may be too aggressive. + // If the connection pool e.g. Hikari resets this property for a recycled connection anyway, doing it here + // might not be necessary + currentConnection.set(None) + } // Once the `use` of this outer-Scoped is done, rollback the connection if needed _ <- ZIO.addFinalizerExit { - case Success(_) => blocking(ZIO.succeed(connection.commit())) - case Failure(cause) => blocking(ZIO.succeed(connection.rollback())) - } + case Success(_) => blocking(ZIO.succeed(connection.commit())) + case Failure(cause) => blocking(ZIO.succeed(connection.rollback())) + } } yield () ZIO.scoped(connection *> op) }) - } private def onConnection[T](qlio: ZIO[Connection, SQLException, T]): ZIO[DataSource, SQLException, T] = currentConnection.get.flatMap { @@ -221,14 +284,16 @@ abstract class ZioJdbcContext[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] e blocking(qlio.provideLayer(Quill.Connection.acquireScoped)) } - private def onConnectionStream[T](qstream: ZStream[Connection, SQLException, T]): ZStream[DataSource, SQLException, T] = + private def onConnectionStream[T]( + qstream: ZStream[Connection, SQLException, T] + ): ZStream[DataSource, SQLException, T] = streamBlocker *> ZStream.fromZIO(currentConnection.get).flatMap { case Some(connection) => qstream.provideEnvironment(ZEnvironment(connection)) case None => (for { env <- ZStream.scoped(Quill.Connection.acquireScoped.build) - r <- qstream.provideEnvironment(env) + r <- qstream.provideEnvironment(env) } yield (r)).refineToOrDie[SQLException] } -} \ No newline at end of file +} diff --git a/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ZioJdbcUnderlyingContext.scala b/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ZioJdbcUnderlyingContext.scala index 4d6c34789..7eb834a4d 100644 --- a/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ZioJdbcUnderlyingContext.scala +++
b/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ZioJdbcUnderlyingContext.scala @@ -3,98 +3,152 @@ package io.getquill.context.qzio import io.getquill.context.ZioJdbc._ import io.getquill.context.jdbc.JdbcContextVerbExecute import io.getquill.context.sql.idiom.SqlIdiom -import io.getquill.context.{ ExecutionInfo, ContextVerbStream } +import io.getquill.context.{ExecutionInfo, ContextVerbStream} import io.getquill.util.ContextLogger import io.getquill._ -import zio.Exit.{ Failure, Success } +import zio.Exit.{Failure, Success} import zio.ZIO.blocking -import zio.stream.{ Stream, ZStream } -import zio.{ Cause, Task, UIO, ZIO, StackTrace } +import zio.stream.{Stream, ZStream} +import zio.{Cause, Task, UIO, ZIO, StackTrace} -import java.sql.{ Array => _, _ } +import java.sql.{Array => _, _} import javax.sql.DataSource import scala.reflect.ClassTag import scala.util.Try import scala.annotation.targetName -abstract class ZioJdbcUnderlyingContext[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] extends ZioContext[Dialect, Naming] - with JdbcContextVerbExecute[Dialect, Naming] - with ContextVerbStream[Dialect, Naming] - with ZioPrepareContext[Dialect, Naming] - with ZioTranslateContext[Dialect, Naming] { +abstract class ZioJdbcUnderlyingContext[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] + extends ZioContext[Dialect, Naming] + with JdbcContextVerbExecute[Dialect, Naming] + with ContextVerbStream[Dialect, Naming] + with ZioPrepareContext[Dialect, Naming] + with ZioTranslateContext[Dialect, Naming] { override private[getquill] val logger = ContextLogger(classOf[ZioJdbcUnderlyingContext[_, _]]) - override type Error = SQLException - override type Environment = Session - override type PrepareRow = PreparedStatement - override type ResultRow = ResultSet - override type RunActionResult = Long - override type RunActionReturningResult[T] = T - override type RunBatchActionResult = List[Long] + override type Error = SQLException + override type Environment = Session + override type PrepareRow = PreparedStatement + override type ResultRow = ResultSet + override type RunActionResult = Long + override type RunActionReturningResult[T] = T + override type RunBatchActionResult = List[Long] override type RunBatchActionReturningResult[T] = List[T] - override type Runner = Unit - override type TranslateRunner = Unit + override type Runner = Unit + override type TranslateRunner = Unit override protected def context: Runner = () - def translateContext: TranslateRunner = () + def translateContext: TranslateRunner = () @targetName("runQueryDefault") - inline def run[T](inline quoted: Quoted[Query[T]]): ZIO[Connection, SQLException, List[T]] = InternalApi.runQueryDefault(quoted) + inline def run[T](inline quoted: Quoted[Query[T]]): ZIO[Connection, SQLException, List[T]] = + InternalApi.runQueryDefault(quoted) @targetName("runQuery") - inline def run[T](inline quoted: Quoted[Query[T]], inline wrap: OuterSelectWrap): ZIO[Connection, SQLException, List[T]] = InternalApi.runQuery(quoted, wrap) + inline def run[T]( + inline quoted: Quoted[Query[T]], + inline wrap: OuterSelectWrap + ): ZIO[Connection, SQLException, List[T]] = InternalApi.runQuery(quoted, wrap) @targetName("runQuerySingle") inline def run[T](inline quoted: Quoted[T]): ZIO[Connection, SQLException, T] = InternalApi.runQuerySingle(quoted) @targetName("runAction") - inline def run[E](inline quoted: Quoted[Action[E]]): ZIO[Connection, SQLException, Long] = InternalApi.runAction(quoted) + inline def run[E](inline quoted: Quoted[Action[E]]): 
ZIO[Connection, SQLException, Long] = + InternalApi.runAction(quoted) @targetName("runActionReturning") - inline def run[E, T](inline quoted: Quoted[ActionReturning[E, T]]): ZIO[Connection, SQLException, T] = InternalApi.runActionReturning[E, T](quoted) + inline def run[E, T](inline quoted: Quoted[ActionReturning[E, T]]): ZIO[Connection, SQLException, T] = + InternalApi.runActionReturning[E, T](quoted) @targetName("runActionReturningMany") - inline def run[E, T](inline quoted: Quoted[ActionReturning[E, List[T]]]): ZIO[Connection, SQLException, List[T]] = InternalApi.runActionReturningMany[E, T](quoted) + inline def run[E, T](inline quoted: Quoted[ActionReturning[E, List[T]]]): ZIO[Connection, SQLException, List[T]] = + InternalApi.runActionReturningMany[E, T](quoted) @targetName("runBatchAction") - inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]], rowsPerBatch: Int): ZIO[Connection, SQLException, List[Long]] = InternalApi.runBatchAction(quoted, rowsPerBatch) + inline def run[I, A <: Action[I] & QAC[I, Nothing]]( + inline quoted: Quoted[BatchAction[A]], + rowsPerBatch: Int + ): ZIO[Connection, SQLException, List[Long]] = InternalApi.runBatchAction(quoted, rowsPerBatch) @targetName("runBatchActionDefault") - inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): ZIO[Connection, SQLException, List[Long]] = InternalApi.runBatchAction(quoted, 1) + inline def run[I, A <: Action[I] & QAC[I, Nothing]]( + inline quoted: Quoted[BatchAction[A]] + ): ZIO[Connection, SQLException, List[Long]] = InternalApi.runBatchAction(quoted, 1) @targetName("runBatchActionReturning") - inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]], rowsPerBatch: Int): ZIO[Connection, SQLException, List[T]] = InternalApi.runBatchActionReturning(quoted, rowsPerBatch) + inline def run[I, T, A <: Action[I] & QAC[I, T]]( + inline quoted: Quoted[BatchAction[A]], + rowsPerBatch: Int + ): ZIO[Connection, SQLException, List[T]] = InternalApi.runBatchActionReturning(quoted, rowsPerBatch) @targetName("runBatchActionReturningDefault") - inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]]): ZIO[Connection, SQLException, List[T]] = InternalApi.runBatchActionReturning(quoted, 1) + inline def run[I, T, A <: Action[I] & QAC[I, T]]( + inline quoted: Quoted[BatchAction[A]] + ): ZIO[Connection, SQLException, List[T]] = InternalApi.runBatchActionReturning(quoted, 1) // Need explicit return-type annotations due to scala/bug#8356. Otherwise macro system will not understand Result[Long]=Task[Long] etc... 
- override def executeAction(sql: String, prepare: Prepare = identityPrepare)(info: ExecutionInfo, dc: Runner): QCIO[Long] = + override def executeAction(sql: String, prepare: Prepare = identityPrepare)( + info: ExecutionInfo, + dc: Runner + ): QCIO[Long] = super.executeAction(sql, prepare)(info, dc) - override def executeQuery[T](sql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)(info: ExecutionInfo, dc: Runner): QCIO[List[T]] = + override def executeQuery[T]( + sql: String, + prepare: Prepare = identityPrepare, + extractor: Extractor[T] = identityExtractor + )(info: ExecutionInfo, dc: Runner): QCIO[List[T]] = super.executeQuery(sql, prepare, extractor)(info, dc) - override def executeQuerySingle[T](sql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)(info: ExecutionInfo, dc: Runner): QCIO[T] = + override def executeQuerySingle[T]( + sql: String, + prepare: Prepare = identityPrepare, + extractor: Extractor[T] = identityExtractor + )(info: ExecutionInfo, dc: Runner): QCIO[T] = super.executeQuerySingle(sql, prepare, extractor)(info, dc) - override def executeActionReturning[O](sql: String, prepare: Prepare = identityPrepare, extractor: Extractor[O], returningBehavior: ReturnAction)(info: ExecutionInfo, dc: Runner): QCIO[O] = + override def executeActionReturning[O]( + sql: String, + prepare: Prepare = identityPrepare, + extractor: Extractor[O], + returningBehavior: ReturnAction + )(info: ExecutionInfo, dc: Runner): QCIO[O] = super.executeActionReturning(sql, prepare, extractor, returningBehavior)(info, dc) - override def executeActionReturningMany[O](sql: String, prepare: Prepare = identityPrepare, extractor: Extractor[O], returningBehavior: ReturnAction)(info: ExecutionInfo, dc: Runner): QCIO[List[O]] = + override def executeActionReturningMany[O]( + sql: String, + prepare: Prepare = identityPrepare, + extractor: Extractor[O], + returningBehavior: ReturnAction + )(info: ExecutionInfo, dc: Runner): QCIO[List[O]] = super.executeActionReturningMany(sql, prepare, extractor, returningBehavior)(info, dc) override def executeBatchAction(groups: List[BatchGroup])(info: ExecutionInfo, dc: Runner): QCIO[List[Long]] = super.executeBatchAction(groups)(info, dc) - override def executeBatchActionReturning[T](groups: List[BatchGroupReturning], extractor: Extractor[T])(info: ExecutionInfo, dc: Runner): QCIO[List[T]] = + override def executeBatchActionReturning[T](groups: List[BatchGroupReturning], extractor: Extractor[T])( + info: ExecutionInfo, + dc: Runner + ): QCIO[List[T]] = super.executeBatchActionReturning(groups, extractor)(info, dc) override def prepareQuery(sql: String, prepare: Prepare)(info: ExecutionInfo, dc: Runner): QCIO[PreparedStatement] = super.prepareQuery(sql, prepare)(info, dc) override def prepareAction(sql: String, prepare: Prepare)(info: ExecutionInfo, dc: Runner): QCIO[PreparedStatement] = super.prepareAction(sql, prepare)(info, dc) - override def prepareBatchAction(groups: List[BatchGroup])(info: ExecutionInfo, dc: Runner): QCIO[List[PreparedStatement]] = + override def prepareBatchAction( + groups: List[BatchGroup] + )(info: ExecutionInfo, dc: Runner): QCIO[List[PreparedStatement]] = super.prepareBatchAction(groups)(info, dc) - override def translateQueryEndpoint[T](statement: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor, prettyPrint: Boolean = false)(info: ExecutionInfo, dc: Runner): QCIO[String] = + override def translateQueryEndpoint[T]( + statement: 
String, + prepare: Prepare = identityPrepare, + extractor: Extractor[T] = identityExtractor, + prettyPrint: Boolean = false + )(info: ExecutionInfo, dc: Runner): QCIO[String] = super.translateQueryEndpoint(statement, prepare, extractor, prettyPrint)(info, dc) - override def translateBatchQueryEndpoint(groups: List[BatchGroup], prettyPrint: Boolean = false)(info: ExecutionInfo, dc: Runner): QCIO[List[String]] = + override def translateBatchQueryEndpoint(groups: List[BatchGroup], prettyPrint: Boolean = false)( + info: ExecutionInfo, + dc: Runner + ): QCIO[List[String]] = super.translateBatchQueryEndpoint(groups, prettyPrint)(info, dc) /** ZIO Contexts do not manage DB connections so this is a no-op */ override def close(): Unit = () - protected def withConnection[T](f: Connection => Result[T]): Result[T] = throw new IllegalArgumentException("Not Used") + protected def withConnection[T](f: Connection => Result[T]): Result[T] = throw new IllegalArgumentException( + "Not Used" + ) // Primary method used to actually run Quill context commands query, insert, update, delete and others override protected def withConnectionWrapped[T](f: Connection => T): QCIO[T] = blocking { for { - conn <- ZIO.service[Connection] + conn <- ZIO.service[Connection] result <- sqlEffect(f(conn)) } yield result } @@ -102,44 +156,57 @@ abstract class ZioJdbcUnderlyingContext[+Dialect <: SqlIdiom, +Naming <: NamingS private def sqlEffect[T](t: => T): QCIO[T] = ZIO.attempt(t).refineToOrDie[SQLException] /** - * Note that for ZIO 2.0 since the env is covariant, R can be a subtype of connection because if there are other with-clauses - * they can be generalized to Something <: Connection. E.g. `Connection with OtherStuff` generalizes to `Something <: Connection`. + * Note that for ZIO 2.0 since the env is covariant, R can be a subtype of + * Connection because if there are other with-clauses they can be generalized + * to Something <: Connection. E.g. `Connection with OtherStuff` generalizes + * to `Something <: Connection`.
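 + *
 + * For example (an illustrative sketch; `OtherStuff` is a made-up trait):
 + * {{{
 + * trait OtherStuff
 + * val op: ZIO[Connection with OtherStuff, SQLException, Int] = ???
 + * // R is inferred as Connection with OtherStuff, which satisfies R <: Connection
 + * val guarded: ZIO[Connection with OtherStuff, SQLException, Int] = withoutAutoCommit(op)
 + * }}}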
*/ - private[getquill] def withoutAutoCommit[R <: Connection, A, E <: Throwable: ClassTag](f: ZIO[R, E, A]): ZIO[R, E, A] = { + private[getquill] def withoutAutoCommit[R <: Connection, A, E <: Throwable: ClassTag](f: ZIO[R, E, A]): ZIO[R, E, A] = for { - conn <- ZIO.service[Connection] + conn <- ZIO.service[Connection] autoCommitPrev = conn.getAutoCommit - r <- ZIO.acquireReleaseWith(sqlEffect(conn))(conn => ZIO.succeed(conn.setAutoCommit(autoCommitPrev))) { conn => - sqlEffect(conn.setAutoCommit(false)).flatMap(_ => f) - }.refineToOrDie[E] + r <- ZIO + .acquireReleaseWith(sqlEffect(conn))(conn => ZIO.succeed(conn.setAutoCommit(autoCommitPrev))) { conn => + sqlEffect(conn.setAutoCommit(false)).flatMap(_ => f) + } + .refineToOrDie[E] } yield r - } - private[getquill] def streamWithoutAutoCommit[A](f: ZStream[Connection, Throwable, A]): ZStream[Connection, Throwable, A] = { + private[getquill] def streamWithoutAutoCommit[A]( + f: ZStream[Connection, Throwable, A] + ): ZStream[Connection, Throwable, A] = for { - conn <- ZStream.service[Connection] + conn <- ZStream.service[Connection] autoCommitPrev = conn.getAutoCommit - r <- ZStream.acquireReleaseWith(ZIO.attempt(conn.setAutoCommit(false)))(_ => { - ZIO.succeed(conn.setAutoCommit(autoCommitPrev)) - }).flatMap(_ => f) + r <- ZStream + .acquireReleaseWith(ZIO.attempt(conn.setAutoCommit(false))) { _ => + ZIO.succeed(conn.setAutoCommit(autoCommitPrev)) + } + .flatMap(_ => f) } yield r - } - def transaction[R <: Connection, A](f: ZIO[R, Throwable, A]): ZIO[R, Throwable, A] = { - ZIO.environment[R].flatMap(env => - blocking(withoutAutoCommit( - f.onExit { - case Success(_) => - ZIO.succeed(env.get[Connection].commit()) - case Failure(cause) => - ZIO.succeed(env.get[Connection].rollback()).foldCauseZIO( - // NOTE: cause.flatMap(Cause.die) means wrap up the throwable failures into die failures, can only do if E param is Throwable (can also do .orDie at the end) - rollbackFailCause => ZIO.failCause(cause.flatMap(Cause.die(_, StackTrace.none)) ++ rollbackFailCause), - _ => ZIO.failCause(cause.flatMap(Cause.die(_, StackTrace.none))) // or ZIO.halt(cause).orDie - ) - }.provideEnvironment(env) - ))) - } + def transaction[R <: Connection, A](f: ZIO[R, Throwable, A]): ZIO[R, Throwable, A] = + ZIO + .environment[R] + .flatMap(env => + blocking( + withoutAutoCommit( + f.onExit { + case Success(_) => + ZIO.succeed(env.get[Connection].commit()) + case Failure(cause) => + ZIO + .succeed(env.get[Connection].rollback()) + .foldCauseZIO( + // NOTE: cause.flatMap(Cause.die) means wrap up the throwable failures into die failures, can only do if E param is Throwable (can also do .orDie at the end) + rollbackFailCause => + ZIO.failCause(cause.flatMap(Cause.die(_, StackTrace.none)) ++ rollbackFailCause), + _ => ZIO.failCause(cause.flatMap(Cause.die(_, StackTrace.none))) // or ZIO.halt(cause).orDie + ) + }.provideEnvironment(env) + ) + ) + ) def probingDataSource: Option[DataSource] = None @@ -169,9 +236,14 @@ abstract class ZioJdbcUnderlyingContext[+Dialect <: SqlIdiom, +Naming <: NamingS stmt } - def streamQuery[T](fetchSize: Option[Int], sql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)(info: ExecutionInfo, dc: Runner): QCStream[T] = { + def streamQuery[T]( + fetchSize: Option[Int], + sql: String, + prepare: Prepare = identityPrepare, + extractor: Extractor[T] = identityExtractor + )(info: ExecutionInfo, dc: Runner): QCStream[T] = { def prepareStatement(conn: Connection) = { - val stmt = prepareStatementForStreaming(sql, 
conn, fetchSize) + val stmt = prepareStatementForStreaming(sql, conn, fetchSize) val (params, ps) = prepare(stmt, conn) logger.logQuery(sql, params) ps @@ -181,37 +253,37 @@ abstract class ZioJdbcUnderlyingContext[+Dialect <: SqlIdiom, +Naming <: NamingS ZStream.scoped { for { conn <- ZIO.service[Connection] - ps <- scopedBestEffort(ZIO.attempt(prepareStatement(conn))) - rs <- scopedBestEffort(ZIO.attempt(ps.executeQuery())) + ps <- scopedBestEffort(ZIO.attempt(prepareStatement(conn))) + rs <- scopedBestEffort(ZIO.attempt(ps.executeQuery())) } yield (conn, ps, rs) } val outStream: ZStream[Connection, Throwable, T] = - scopedEnv.flatMap { - case (conn, ps, rs) => - val iter = new ResultSetIterator(rs, conn, extractor) - fetchSize match { - // TODO Assuming chunk size is fetch size. Not sure if this is optimal. - // Maybe introduce some switches to control this? - case Some(size) => - ZStream.fromIterator(iter, size) - case None => - ZStream.fromIterator(new ResultSetIterator(rs, conn, extractor)) - } + scopedEnv.flatMap { case (conn, ps, rs) => + val iter = new ResultSetIterator(rs, conn, extractor) + fetchSize match { + // TODO Assuming chunk size is fetch size. Not sure if this is optimal. + // Maybe introduce some switches to control this? + case Some(size) => + ZStream.fromIterator(iter, size) + case None => + ZStream.fromIterator(new ResultSetIterator(rs, conn, extractor)) + } } // Run the chunked fetch on the blocking pool streamBlocker *> streamWithoutAutoCommit(outStream).refineToOrDie[SQLException] } - override private[getquill] def prepareParams(statement: String, prepare: Prepare): QCIO[Seq[String]] = { + override private[getquill] def prepareParams(statement: String, prepare: Prepare): QCIO[Seq[String]] = withConnectionWrapped { conn => prepare(conn.prepareStatement(statement), conn)._1.reverse.map(prepareParam) } - } // Generally these are not used in the ZIO context but have implementations in case they are needed override def wrap[T](t: => T): ZIO[Connection, SQLException, T] = QCIO(t) - override def push[A, B](result: ZIO[Connection, SQLException, A])(f: A => B): ZIO[Connection, SQLException, B] = result.map(f) - override def seq[A](f: List[ZIO[Connection, SQLException, A]]): ZIO[Connection, SQLException, List[A]] = ZIO.collectAll(f) + override def push[A, B](result: ZIO[Connection, SQLException, A])(f: A => B): ZIO[Connection, SQLException, B] = + result.map(f) + override def seq[A](f: List[ZIO[Connection, SQLException, A]]): ZIO[Connection, SQLException, List[A]] = + ZIO.collectAll(f) } diff --git a/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ZioPrepareContext.scala b/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ZioPrepareContext.scala index 349b7b4ff..708ab938c 100644 --- a/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ZioPrepareContext.scala +++ b/quill-jdbc-zio/src/main/scala/io/getquill/context/qzio/ZioPrepareContext.scala @@ -1,54 +1,66 @@ package io.getquill.context.qzio import io.getquill.NamingStrategy -import io.getquill.context.{ ExecutionInfo, ContextVerbPrepare } +import io.getquill.context.{ExecutionInfo, ContextVerbPrepare} import io.getquill.context.ZioJdbc._ import io.getquill.context.sql.idiom.SqlIdiom import io.getquill.util.ContextLogger -import zio.{ Task, ZIO } +import zio.{Task, ZIO} -import java.sql.{ Connection, PreparedStatement, ResultSet, SQLException } +import java.sql.{Connection, PreparedStatement, ResultSet, SQLException} -trait ZioPrepareContext[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] extends 
ZioContext[Dialect, Naming] - with ContextVerbPrepare[Dialect, Naming] { +trait ZioPrepareContext[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] + extends ZioContext[Dialect, Naming] + with ContextVerbPrepare[Dialect, Naming] { private[getquill] val logger = ContextLogger(classOf[ZioPrepareContext[_, _]]) - override type PrepareRow = PreparedStatement - override type ResultRow = ResultSet - override type PrepareQueryResult = QCIO[PrepareRow] - override type PrepareActionResult = QCIO[PrepareRow] + override type PrepareRow = PreparedStatement + override type ResultRow = ResultSet + override type PrepareQueryResult = QCIO[PrepareRow] + override type PrepareActionResult = QCIO[PrepareRow] override type PrepareBatchActionResult = QCIO[List[PrepareRow]] - override type Session = Connection + override type Session = Connection - def prepareQuery(sql: String, prepare: Prepare = identityPrepare)(info: ExecutionInfo, dc: Runner): PrepareQueryResult = + def prepareQuery(sql: String, prepare: Prepare = identityPrepare)( + info: ExecutionInfo, + dc: Runner + ): PrepareQueryResult = prepareSingle(sql, prepare)(info, dc) - def prepareAction(sql: String, prepare: Prepare = identityPrepare)(info: ExecutionInfo, dc: Runner): PrepareActionResult = + def prepareAction(sql: String, prepare: Prepare = identityPrepare)( + info: ExecutionInfo, + dc: Runner + ): PrepareActionResult = prepareSingle(sql, prepare)(info, dc) - /** Execute SQL on connection and return prepared statement. Closes the statement in a bracket. */ - def prepareSingle(sql: String, prepare: Prepare = identityPrepare)(info: ExecutionInfo, dc: Runner): QCIO[PreparedStatement] = { + /** + * Execute SQL on connection and return prepared statement. Closes the + * statement in a bracket. + */ + def prepareSingle( + sql: String, + prepare: Prepare = identityPrepare + )(info: ExecutionInfo, dc: Runner): QCIO[PreparedStatement] = (for { conn <- ZIO.service[Session] stmt <- ZIO.attempt(conn.prepareStatement(sql)) ps <- ZIO.attempt { - val (params, ps) = prepare(stmt, conn) - logger.logQuery(sql, params) - ps - } + val (params, ps) = prepare(stmt, conn) + logger.logQuery(sql, params) + ps + } } yield ps).refineToOrDie[SQLException] - } def prepareBatchAction(groups: List[BatchGroup])(info: ExecutionInfo, dc: Runner): PrepareBatchActionResult = - ZIO.collectAll[Connection, Throwable, PrepareRow, List] { - val batches = groups.flatMap { - case BatchGroup(sql, prepares) => + ZIO + .collectAll[Connection, Throwable, PrepareRow, List] { + val batches = groups.flatMap { case BatchGroup(sql, prepares) => prepares.map(sql -> _) - } - batches.map { - case (sql, prepare) => + } + batches.map { case (sql, prepare) => prepareSingle(sql, prepare)(info, dc) + } } - }.refineToOrDie[SQLException] + .refineToOrDie[SQLException] } diff --git a/quill-jdbc-zio/src/main/scala/io/getquill/jdbczio/Quill.scala b/quill-jdbc-zio/src/main/scala/io/getquill/jdbczio/Quill.scala index 63664fce7..8536bc4a5 100644 --- a/quill-jdbc-zio/src/main/scala/io/getquill/jdbczio/Quill.scala +++ b/quill-jdbc-zio/src/main/scala/io/getquill/jdbczio/Quill.scala @@ -7,25 +7,28 @@ import io.getquill.context.ZioJdbc.scopedBestEffort import io.getquill.context.jdbc._ import io.getquill.context.sql.idiom.SqlIdiom import io.getquill.util.LoadConfig -import zio.{ Tag, ZIO, ZLayer } +import zio.{Tag, ZIO, ZLayer} import java.io.Closeable -import java.sql.{ Connection, SQLException } +import java.sql.{Connection, SQLException} import javax.sql.DataSource import 
io.getquill.context.json.PostgresJsonExtensions object Quill { class Postgres[+N <: NamingStrategy](val naming: N, override val ds: DataSource) - extends Quill[PostgresDialect, N] with PostgresJdbcTypes[PostgresDialect, N] with PostgresJsonExtensions { + extends Quill[PostgresDialect, N] + with PostgresJdbcTypes[PostgresDialect, N] + with PostgresJsonExtensions { val idiom: PostgresDialect = PostgresDialect - val dsDelegate = new PostgresZioJdbcContext[N](naming) + val dsDelegate = new PostgresZioJdbcContext[N](naming) } /** Postgres ZIO Context without JDBC Encoders */ class PostgresLite[+N <: NamingStrategy](val naming: N, override val ds: DataSource) - extends Quill[PostgresDialect, N] with PostgresJdbcTypes[PostgresDialect, N] { + extends Quill[PostgresDialect, N] + with PostgresJdbcTypes[PostgresDialect, N] { val idiom: PostgresDialect = PostgresDialect - val dsDelegate = new PostgresZioJdbcContext[N](naming) + val dsDelegate = new PostgresZioJdbcContext[N](naming) } object Postgres { @@ -35,9 +38,10 @@ object Quill { } class SqlServer[+N <: NamingStrategy](val naming: N, override val ds: DataSource) - extends Quill[SQLServerDialect, N] with SqlServerJdbcTypes[SQLServerDialect, N] { + extends Quill[SQLServerDialect, N] + with SqlServerJdbcTypes[SQLServerDialect, N] { val idiom: SQLServerDialect = SQLServerDialect - val dsDelegate = new SqlServerZioJdbcContext[N](naming) + val dsDelegate = new SqlServerZioJdbcContext[N](naming) } object SqlServer { def apply[N <: NamingStrategy](naming: N, ds: DataSource) = new SqlServer[N](naming, ds) @@ -46,9 +50,10 @@ object Quill { } class H2[+N <: NamingStrategy](val naming: N, override val ds: DataSource) - extends Quill[H2Dialect, N] with H2JdbcTypes[H2Dialect, N] { + extends Quill[H2Dialect, N] + with H2JdbcTypes[H2Dialect, N] { val idiom: H2Dialect = H2Dialect - val dsDelegate = new H2ZioJdbcContext[N](naming) + val dsDelegate = new H2ZioJdbcContext[N](naming) } object H2 { def apply[N <: NamingStrategy](naming: N, ds: DataSource) = new H2[N](naming, ds) @@ -57,9 +62,10 @@ object Quill { } class Mysql[+N <: NamingStrategy](val naming: N, override val ds: DataSource) - extends Quill[MySQLDialect, N] with MysqlJdbcTypes[MySQLDialect, N] { + extends Quill[MySQLDialect, N] + with MysqlJdbcTypes[MySQLDialect, N] { val idiom: MySQLDialect = MySQLDialect - val dsDelegate = new MysqlZioJdbcContext[N](naming) + val dsDelegate = new MysqlZioJdbcContext[N](naming) } object Mysql { def apply[N <: NamingStrategy](naming: N, ds: DataSource) = new Mysql[N](naming, ds) @@ -68,9 +74,10 @@ object Quill { } class Sqlite[+N <: NamingStrategy](val naming: N, override val ds: DataSource) - extends Quill[SqliteDialect, N] with SqliteJdbcTypes[SqliteDialect, N] { + extends Quill[SqliteDialect, N] + with SqliteJdbcTypes[SqliteDialect, N] { val idiom: SqliteDialect = SqliteDialect - val dsDelegate = new SqliteZioJdbcContext[N](naming) + val dsDelegate = new SqliteZioJdbcContext[N](naming) } object Sqlite { def apply[N <: NamingStrategy](naming: N, ds: DataSource) = new Sqlite[N](naming, ds) @@ -79,9 +86,10 @@ object Quill { } class Oracle[+N <: NamingStrategy](val naming: N, override val ds: DataSource) - extends Quill[OracleDialect, N] with OracleJdbcTypes[OracleDialect, N] { + extends Quill[OracleDialect, N] + with OracleJdbcTypes[OracleDialect, N] { val idiom: OracleDialect = OracleDialect - val dsDelegate = new OracleZioJdbcContext[N](naming) + val dsDelegate = new OracleZioJdbcContext[N](naming) } object Oracle { def apply[N <: NamingStrategy](naming: N, ds: 
DataSource) = new Oracle[N](naming, ds) @@ -94,8 +102,11 @@ object Quill { ZLayer.scoped { for { blockingExecutor <- ZIO.blockingExecutor - ds <- ZIO.service[DataSource] - r <- ZioJdbc.scopedBestEffort(ZIO.attempt(ds.getConnection)).refineToOrDie[SQLException].onExecutor(blockingExecutor) + ds <- ZIO.service[DataSource] + r <- ZioJdbc + .scopedBestEffort(ZIO.attempt(ds.getConnection)) + .refineToOrDie[SQLException] + .onExecutor(blockingExecutor) } yield r } } @@ -119,11 +130,13 @@ object Quill { def fromPrefixClosable(prefix: String): ZLayer[Any, Throwable, DataSource with Closeable] = fromJdbcConfigClosable(JdbcContextConfig(LoadConfig(prefix))) - def fromJdbcConfigClosable(jdbcContextConfig: => JdbcContextConfig): ZLayer[Any, Throwable, DataSource with Closeable] = + def fromJdbcConfigClosable( + jdbcContextConfig: => JdbcContextConfig + ): ZLayer[Any, Throwable, DataSource with Closeable] = ZLayer.scoped { for { conf <- ZIO.attempt(jdbcContextConfig) - ds <- scopedBestEffort(ZIO.attempt(conf.dataSource)) + ds <- scopedBestEffort(ZIO.attempt(conf.dataSource)) } yield ds } } diff --git a/quill-jdbc-zio/src/main/scala/io/getquill/jdbczio/QuillBaseContext.scala b/quill-jdbc-zio/src/main/scala/io/getquill/jdbczio/QuillBaseContext.scala index 48ef8a595..9d27c8d66 100644 --- a/quill-jdbc-zio/src/main/scala/io/getquill/jdbczio/QuillBaseContext.scala +++ b/quill-jdbc-zio/src/main/scala/io/getquill/jdbczio/QuillBaseContext.scala @@ -1,48 +1,49 @@ package io.getquill.jdbczio import io.getquill._ -import io.getquill.context.{ ContextVerbStream, ExecutionInfo, ProtoContextSecundus } +import io.getquill.context.{ContextVerbStream, ExecutionInfo, ProtoContextSecundus} import io.getquill.context.jdbc.JdbcContextTypes -import io.getquill.context.qzio.{ ZioContext, ZioJdbcContext, ZioTranslateContext } +import io.getquill.context.qzio.{ZioContext, ZioJdbcContext, ZioTranslateContext} import io.getquill.context.sql.idiom.SqlIdiom -import zio.{ ZEnvironment, ZIO } +import zio.{ZEnvironment, ZIO} import zio.stream.ZStream -import java.sql.{ Connection, PreparedStatement, ResultSet, SQLException } +import java.sql.{Connection, PreparedStatement, ResultSet, SQLException} import javax.sql.DataSource import scala.util.Try import scala.annotation.targetName -trait QuillBaseContext[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] extends ZioContext[Dialect, Naming] - with JdbcContextTypes[Dialect, Naming] - with ProtoContextSecundus[Dialect, Naming] - with ContextVerbStream[Dialect, Naming] - with ZioTranslateContext[Dialect, Naming] { +trait QuillBaseContext[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] + extends ZioContext[Dialect, Naming] + with JdbcContextTypes[Dialect, Naming] + with ProtoContextSecundus[Dialect, Naming] + with ContextVerbStream[Dialect, Naming] + with ZioTranslateContext[Dialect, Naming] { def ds: DataSource - override type StreamResult[T] = ZStream[Environment, Error, T] - override type Result[T] = ZIO[Environment, Error, T] - override type RunQueryResult[T] = List[T] - override type RunQuerySingleResult[T] = T - override type RunActionResult = Long - override type RunActionReturningResult[T] = T - override type RunBatchActionResult = List[Long] + override type StreamResult[T] = ZStream[Environment, Error, T] + override type Result[T] = ZIO[Environment, Error, T] + override type RunQueryResult[T] = List[T] + override type RunQuerySingleResult[T] = T + override type RunActionResult = Long + override type RunActionReturningResult[T] = T + override type RunBatchActionResult = List[Long] 
override type RunBatchActionReturningResult[T] = List[T] // Needed for TranslateContext in ProtoQuill - override type Runner = Unit + override type Runner = Unit override type TranslateRunner = Unit override protected def context: Runner = () - def translateContext: TranslateRunner = () + def translateContext: TranslateRunner = () - override type Error = SQLException + override type Error = SQLException override type Environment = Any - override type PrepareRow = PreparedStatement - override type ResultRow = ResultSet + override type PrepareRow = PreparedStatement + override type ResultRow = ResultSet override type TranslateResult[T] = ZIO[Environment, Error, T] - override type Session = Connection + override type Session = Connection final lazy val underlying: ZioJdbcContext[Dialect, Naming] = dsDelegate private[getquill] val dsDelegate: ZioJdbcContext[Dialect, Naming] @@ -50,66 +51,131 @@ trait QuillBaseContext[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] extends override def close() = () // No Probing in Dotty yet - //override def probe(sql: String): Try[_] = dsDelegate.probe(sql) + // override def probe(sql: String): Try[_] = dsDelegate.probe(sql) @targetName("runQueryDefault") - inline def run[T](inline quoted: Quoted[Query[T]]): ZIO[Any, SQLException, List[T]] = InternalApi.runQueryDefault(quoted) + inline def run[T](inline quoted: Quoted[Query[T]]): ZIO[Any, SQLException, List[T]] = + InternalApi.runQueryDefault(quoted) @targetName("runQuery") - inline def run[T](inline quoted: Quoted[Query[T]], inline wrap: OuterSelectWrap): ZIO[Any, SQLException, List[T]] = InternalApi.runQuery(quoted, wrap) + inline def run[T](inline quoted: Quoted[Query[T]], inline wrap: OuterSelectWrap): ZIO[Any, SQLException, List[T]] = + InternalApi.runQuery(quoted, wrap) @targetName("runQuerySingle") inline def run[T](inline quoted: Quoted[T]): ZIO[Any, SQLException, T] = InternalApi.runQuerySingle(quoted) @targetName("runAction") inline def run[E](inline quoted: Quoted[Action[E]]): ZIO[Any, SQLException, Long] = InternalApi.runAction(quoted) @targetName("runActionReturning") - inline def run[E, T](inline quoted: Quoted[ActionReturning[E, T]]): ZIO[Any, SQLException, T] = InternalApi.runActionReturning[E, T](quoted) + inline def run[E, T](inline quoted: Quoted[ActionReturning[E, T]]): ZIO[Any, SQLException, T] = + InternalApi.runActionReturning[E, T](quoted) @targetName("runActionReturningMany") - inline def run[E, T](inline quoted: Quoted[ActionReturning[E, List[T]]]): ZIO[Any, SQLException, List[T]] = InternalApi.runActionReturningMany[E, T](quoted) + inline def run[E, T](inline quoted: Quoted[ActionReturning[E, List[T]]]): ZIO[Any, SQLException, List[T]] = + InternalApi.runActionReturningMany[E, T](quoted) @targetName("runBatchAction") - inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]], rowsPerBatch: Int): ZIO[Any, SQLException, List[Long]] = InternalApi.runBatchAction(quoted, rowsPerBatch) + inline def run[I, A <: Action[I] & QAC[I, Nothing]]( + inline quoted: Quoted[BatchAction[A]], + rowsPerBatch: Int + ): ZIO[Any, SQLException, List[Long]] = InternalApi.runBatchAction(quoted, rowsPerBatch) @targetName("runBatchActionDefault") - inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): ZIO[Any, SQLException, List[Long]] = InternalApi.runBatchAction(quoted, 1) + inline def run[I, A <: Action[I] & QAC[I, Nothing]]( + inline quoted: Quoted[BatchAction[A]] + ): ZIO[Any, SQLException, List[Long]] = 
InternalApi.runBatchAction(quoted, 1) @targetName("runBatchActionReturning") - inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]], rowsPerBatch: Int): ZIO[Any, SQLException, List[T]] = InternalApi.runBatchActionReturning(quoted, rowsPerBatch) + inline def run[I, T, A <: Action[I] & QAC[I, T]]( + inline quoted: Quoted[BatchAction[A]], + rowsPerBatch: Int + ): ZIO[Any, SQLException, List[T]] = InternalApi.runBatchActionReturning(quoted, rowsPerBatch) @targetName("runBatchActionReturningDefault") - inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]]): ZIO[Any, SQLException, List[T]] = InternalApi.runBatchActionReturning(quoted, 1) - - def executeAction(sql: String, prepare: Prepare = identityPrepare)(info: ExecutionInfo, dc: Runner): ZIO[Any, SQLException, Long] = + inline def run[I, T, A <: Action[I] & QAC[I, T]]( + inline quoted: Quoted[BatchAction[A]] + ): ZIO[Any, SQLException, List[T]] = InternalApi.runBatchActionReturning(quoted, 1) + + def executeAction(sql: String, prepare: Prepare = identityPrepare)( + info: ExecutionInfo, + dc: Runner + ): ZIO[Any, SQLException, Long] = onDS(dsDelegate.executeAction(sql, prepare)(info, dc)) - def executeQuery[T](sql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)(info: ExecutionInfo, dc: Runner): ZIO[Any, SQLException, List[T]] = + def executeQuery[T](sql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)( + info: ExecutionInfo, + dc: Runner + ): ZIO[Any, SQLException, List[T]] = onDS(dsDelegate.executeQuery[T](sql, prepare, extractor)(info, dc)) - override def executeQuerySingle[T](sql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)(info: ExecutionInfo, dc: Runner): ZIO[Any, SQLException, T] = + override def executeQuerySingle[T]( + sql: String, + prepare: Prepare = identityPrepare, + extractor: Extractor[T] = identityExtractor + )(info: ExecutionInfo, dc: Runner): ZIO[Any, SQLException, T] = onDS(dsDelegate.executeQuerySingle[T](sql, prepare, extractor)(info, dc)) - override def translateQueryEndpoint[T](statement: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor, prettyPrint: Boolean = false)(executionInfo: ExecutionInfo, dc: Runner): TranslateResult[String] = + override def translateQueryEndpoint[T]( + statement: String, + prepare: Prepare = identityPrepare, + extractor: Extractor[T] = identityExtractor, + prettyPrint: Boolean = false + )(executionInfo: ExecutionInfo, dc: Runner): TranslateResult[String] = onDS(dsDelegate.translateQueryEndpoint[T](statement, prepare, extractor, prettyPrint)(executionInfo, dc)) - override def translateBatchQueryEndpoint(groups: List[BatchGroup], prettyPrint: Boolean = false)(executionInfo: ExecutionInfo, dc: Runner): TranslateResult[List[String]] = - onDS(dsDelegate.translateBatchQueryEndpoint(groups.asInstanceOf[List[QuillBaseContext.this.dsDelegate.BatchGroup]], prettyPrint)(executionInfo, dc)) - - def streamQuery[T](fetchSize: Option[Int], sql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)(info: ExecutionInfo, dc: Runner): ZStream[Any, SQLException, T] = + override def translateBatchQueryEndpoint( + groups: List[BatchGroup], + prettyPrint: Boolean = false + )(executionInfo: ExecutionInfo, dc: Runner): TranslateResult[List[String]] = + onDS( + dsDelegate.translateBatchQueryEndpoint( + 
groups.asInstanceOf[List[QuillBaseContext.this.dsDelegate.BatchGroup]],
+        prettyPrint
+      )(executionInfo, dc)
+    )
+
+  def streamQuery[T](
+    fetchSize: Option[Int],
+    sql: String,
+    prepare: Prepare = identityPrepare,
+    extractor: Extractor[T] = identityExtractor
+  )(info: ExecutionInfo, dc: Runner): ZStream[Any, SQLException, T] =
     onDSStream(dsDelegate.streamQuery[T](fetchSize, sql, prepare, extractor)(info, dc))

-  def executeActionReturning[O](sql: String, prepare: Prepare = identityPrepare, extractor: Extractor[O], returningBehavior: ReturnAction)(info: ExecutionInfo, dc: Runner): ZIO[Any, SQLException, O] =
+  def executeActionReturning[O](
+    sql: String,
+    prepare: Prepare = identityPrepare,
+    extractor: Extractor[O],
+    returningBehavior: ReturnAction
+  )(info: ExecutionInfo, dc: Runner): ZIO[Any, SQLException, O] =
     onDS(dsDelegate.executeActionReturning[O](sql, prepare, extractor, returningBehavior)(info, dc))

-  def executeActionReturningMany[O](sql: String, prepare: Prepare = identityPrepare, extractor: Extractor[O], returningBehavior: ReturnAction)(info: ExecutionInfo, dc: Runner): ZIO[Any, SQLException, List[O]] =
+  def executeActionReturningMany[O](
+    sql: String,
+    prepare: Prepare = identityPrepare,
+    extractor: Extractor[O],
+    returningBehavior: ReturnAction
+  )(info: ExecutionInfo, dc: Runner): ZIO[Any, SQLException, List[O]] =
     onDS(dsDelegate.executeActionReturningMany[O](sql, prepare, extractor, returningBehavior)(info, dc))

-  def executeBatchAction(groups: List[BatchGroup])(info: ExecutionInfo, dc: Runner): ZIO[Any, SQLException, List[Long]] =
-    onDS(dsDelegate.executeBatchAction(groups.asInstanceOf[List[QuillBaseContext.this.dsDelegate.BatchGroup]])(info, dc))
-
-  def executeBatchActionReturning[T](groups: List[BatchGroupReturning], extractor: Extractor[T])(info: ExecutionInfo, dc: Runner): ZIO[Any, SQLException, List[T]] =
-    onDS(dsDelegate.executeBatchActionReturning[T](groups.asInstanceOf[List[QuillBaseContext.this.dsDelegate.BatchGroupReturning]], extractor)(info, dc))
+  def executeBatchAction(
+    groups: List[BatchGroup]
+  )(info: ExecutionInfo, dc: Runner): ZIO[Any, SQLException, List[Long]] =
+    onDS(
+      dsDelegate.executeBatchAction(groups.asInstanceOf[List[QuillBaseContext.this.dsDelegate.BatchGroup]])(info, dc)
+    )
+
+  def executeBatchActionReturning[T](
+    groups: List[BatchGroupReturning],
+    extractor: Extractor[T]
+  )(info: ExecutionInfo, dc: Runner): ZIO[Any, SQLException, List[T]] =
+    onDS(
+      dsDelegate.executeBatchActionReturning[T](
+        groups.asInstanceOf[List[QuillBaseContext.this.dsDelegate.BatchGroupReturning]],
+        extractor
+      )(info, dc)
+    )

   // Used in translation functions
   private[getquill] def prepareParams(statement: String, prepare: Prepare): ZIO[Any, SQLException, Seq[String]] =
     onDS(dsDelegate.prepareParams(statement, prepare))
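Every wrapper in this block delegates through the same funnel: the dsDelegate's effects run against ZIO[DataSource, SQLException, _], and the onDS/onDSStream helpers eliminate that requirement by supplying this context's ds. A minimal sketch of that shape, assuming helpers of this form (the bodies below are illustrative, not this patch's code):

import zio.{ZEnvironment, ZIO}
import zio.stream.ZStream
import java.sql.SQLException
import javax.sql.DataSource

trait OnDsSketch {
  def ds: DataSource // mirrors `def ds: DataSource` in QuillBaseContext

  // Assumed shape: feed this context's DataSource to the delegate's effect,
  // turning ZIO[DataSource, SQLException, T] into ZIO[Any, SQLException, T].
  def onDS[T](qzio: ZIO[DataSource, SQLException, T]): ZIO[Any, SQLException, T] =
    qzio.provideEnvironment(ZEnvironment(ds))

  // Same idea for streaming results.
  def onDSStream[T](qstream: ZStream[DataSource, SQLException, T]): ZStream[Any, SQLException, T] =
    qstream.provideEnvironment(ZEnvironment(ds))
}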
   /**
-   * Execute instructions in a transaction. For example, to add a Person row to the database and return
-   * the contents of the Person table immediately after that:
+   * Execute instructions in a transaction. For example, to add a Person row to
+   * the database and return the contents of the Person table immediately after
+   * that:
    * {{{
    * val a = run(query[Person].insert(Person(...))): ZIO[Has[DataSource], SQLException, Long]
    * val b = run(query[Person]): ZIO[Has[DataSource], SQLException, List[Person]]
diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/PeopleZioSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/PeopleZioSpec.scala
index 06b256a81..0dad5f23a 100644
--- a/quill-jdbc-zio/src/test/scala/io/getquill/PeopleZioSpec.scala
+++ b/quill-jdbc-zio/src/test/scala/io/getquill/PeopleZioSpec.scala
@@ -10,7 +10,7 @@ trait PeopleZioSpec extends PeopleSpec with ZioSpec {
   import context._

   inline def `Ex 11 query` = quote(query[Person])
-  val `Ex 11 expected` = peopleEntries
+  val `Ex 11 expected`     = peopleEntries
 }

 trait PeopleZioProxySpec extends PeopleSpec with ZioProxySpec {
@@ -18,6 +18,6 @@ trait PeopleZioProxySpec extends PeopleSpec with ZioProxySpec {
   val context: ZioJdbcContext[_, _]
   import context._

-  val `Ex 11 query` = quote(query[Person])
+  val `Ex 11 query`    = quote(query[Person])
   val `Ex 11 expected` = peopleEntries
 }
diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/PrepareZioJdbcSpecBase.scala b/quill-jdbc-zio/src/test/scala/io/getquill/PrepareZioJdbcSpecBase.scala
index acf1f6da0..eceaa2c9d 100644
--- a/quill-jdbc-zio/src/test/scala/io/getquill/PrepareZioJdbcSpecBase.scala
+++ b/quill-jdbc-zio/src/test/scala/io/getquill/PrepareZioJdbcSpecBase.scala
@@ -5,11 +5,11 @@ import io.getquill.context.jdbc.ResultSetExtractor
 import io.getquill.context.sql.ProductSpec
 import io.getquill.context.qzio.ZioJdbcContext
 import org.scalactic.Equality
-import zio.{ Runtime, Task, ZEnvironment, ZIO }
+import zio.{Runtime, Task, ZEnvironment, ZIO}
 import io.getquill.generic.GenericDecoder
 import io.getquill.generic.DecodingType.Generic
-import java.sql.{ Connection, PreparedStatement, ResultSet }
+import java.sql.{Connection, PreparedStatement, ResultSet}
 import io.getquill.context.qzio.ImplicitSyntax.Implicit
 import javax.sql.DataSource

@@ -25,31 +25,38 @@ trait PrepareZioJdbcSpecBase extends ProductSpec with ZioProxySpec {
     }
   }

-  def productExtractor = (rs: ResultSet, session: Session) => summon[GenericDecoder[context.ResultRow, context.Session, Product, Generic]](0, rs, session)
+  def productExtractor = (rs: ResultSet, session: Session) =>
+    summon[GenericDecoder[context.ResultRow, context.Session, Product, Generic]](0, rs, session)

   def withOrderedIds(products: List[Product]) =
     products.zipWithIndex.map { case (product, id) => product.copy(id = id.toLong + 1) }

-  def singleInsert(prep: QCIO[PreparedStatement])(implicit runtime: Implicit[DataSource]) = {
-    prep.flatMap(stmt => ZIO.attempt(stmt).acquireReleaseWithAuto { stmt => ZIO.attempt(stmt.execute()) }).onDataSource.runSyncUnsafe()
-  }
+  def singleInsert(prep: QCIO[PreparedStatement])(implicit runtime: Implicit[DataSource]) =
+    prep
+      .flatMap(stmt => ZIO.attempt(stmt).acquireReleaseWithAuto(stmt => ZIO.attempt(stmt.execute())))
+      .onDataSource
+      .runSyncUnsafe()

   def batchInsert(prep: QCIO[List[PreparedStatement]])(implicit runtime: Implicit[DataSource]) =
-    prep.flatMap(stmts =>
-      ZIO.collectAll(
-        stmts.map(stmt =>
-          ZIO.attempt(stmt).acquireReleaseWithAuto { stmt => ZIO.attempt(stmt.execute()) })
-      )).onDataSource.runSyncUnsafe()
+    prep
+      .flatMap(stmts =>
+        ZIO.collectAll(
+          stmts.map(stmt => ZIO.attempt(stmt).acquireReleaseWithAuto(stmt => ZIO.attempt(stmt.execute())))
+        )
+      )
+      .onDataSource
+      .runSyncUnsafe()

-  def
extractResults[T](prepareStatement: QCIO[PreparedStatement])(extractor: (ResultSet, Connection) => T)(implicit runtime: Implicit[DataSource]) = + def extractResults[T]( + prepareStatement: QCIO[PreparedStatement] + )(extractor: (ResultSet, Connection) => T)(implicit runtime: Implicit[DataSource]) = (for { conn <- ZIO.service[Connection] result <- prepareStatement.provideEnvironment(ZEnvironment(conn)).acquireReleaseWithAuto { stmt => - ZIO.attempt(stmt.executeQuery()).acquireReleaseWithAuto { rs => - ZIO.attempt(ResultSetExtractor(rs, stmt.getConnection, extractor)) - } - } + ZIO.attempt(stmt.executeQuery()).acquireReleaseWithAuto { rs => + ZIO.attempt(ResultSetExtractor(rs, stmt.getConnection, extractor)) + } + } } yield result).onDataSource.runSyncUnsafe() def extractProducts(prep: QCIO[PreparedStatement])(implicit runtime: Implicit[DataSource]) = diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/ResultSetIteratorSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/ResultSetIteratorSpec.scala index 8580340e6..70f291069 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/ResultSetIteratorSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/ResultSetIteratorSpec.scala @@ -14,7 +14,7 @@ import javax.sql.DataSource class ResultSetIteratorSpec extends ZioProxySpec { implicit val pool: Implicit[DataSource] = Implicit(io.getquill.postgres.pool) - val ctx = new PostgresZioJdbcContext(Literal) + val ctx = new PostgresZioJdbcContext(Literal) import ctx._ case class Person(name: String, age: Int) @@ -40,28 +40,37 @@ class ResultSetIteratorSpec extends ZioProxySpec { "traverses correctly" in { val results = - ZIO.service[DataSource].mapAttempt(ds => ds.getConnection).acquireReleaseWithAuto { conn => - ZIO.attempt { - val stmt = conn.prepareStatement("select * from person") - val rs = new ResultSetIterator[String](stmt.executeQuery(), conn, extractor = (rs, conn) => { rs.getString(1) }) - val accum = ArrayBuffer[String]() - while (rs.hasNext) accum += rs.next() - accum + ZIO + .service[DataSource] + .mapAttempt(ds => ds.getConnection) + .acquireReleaseWithAuto { conn => + ZIO.attempt { + val stmt = conn.prepareStatement("select * from person") + val rs = + new ResultSetIterator[String](stmt.executeQuery(), conn, extractor = (rs, conn) => { rs.getString(1) }) + val accum = ArrayBuffer[String]() + while (rs.hasNext) accum += rs.next() + accum + } } - }.runSyncUnsafe() + .runSyncUnsafe() results must contain theSameElementsAs (peopleEntries.map(_.name)) } "can take head element" in { val result = - ZIO.service[DataSource].mapAttempt(ds => ds.getConnection).acquireReleaseWithAuto { conn => - ZIO.attempt { - val stmt = conn.prepareStatement("select * from person where name = 'Alex'") - val rs = new ResultSetIterator(stmt.executeQuery(), conn, extractor = (rs, conn) => { rs.getString(1) }) - rs.head + ZIO + .service[DataSource] + .mapAttempt(ds => ds.getConnection) + .acquireReleaseWithAuto { conn => + ZIO.attempt { + val stmt = conn.prepareStatement("select * from person where name = 'Alex'") + val rs = new ResultSetIterator(stmt.executeQuery(), conn, extractor = (rs, conn) => { rs.getString(1) }) + rs.head + } } - }.runSyncUnsafe() + .runSyncUnsafe() result must equal("Alex") } diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/ZioSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/ZioSpec.scala index 731202dda..3b71a5536 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/ZioSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/ZioSpec.scala @@ -2,8 +2,8 @@ package 
io.getquill import io.getquill.context.qzio.ImplicitSyntax._ import org.scalatest.BeforeAndAfterAll -import zio.stream.{ ZSink, ZStream } -import zio.{ Runtime, Tag, Unsafe, ZEnvironment, ZIO, ZLayer } +import zio.stream.{ZSink, ZStream} +import zio.{Runtime, Tag, Unsafe, ZEnvironment, ZIO, ZLayer} import java.sql.Connection import javax.sql.DataSource @@ -49,7 +49,9 @@ trait ZioProxySpec extends Spec with BeforeAndAfterAll { def collect[T](stream: ZStream[DataSource, Throwable, T])(implicit runtime: Implicit[DataSource]): List[T] = Unsafe.unsafe { implicit unsafe => - zio.Runtime.default.unsafe.run(stream.run(ZSink.collectAll).map(_.toList).provideEnvironment(ZEnvironment(runtime.env))).getOrThrow() + zio.Runtime.default.unsafe + .run(stream.run(ZSink.collectAll).map(_.toList).provideEnvironment(ZEnvironment(runtime.env))) + .getOrThrow() } def collect[T](qzio: ZIO[DataSource, Throwable, T])(implicit runtime: Implicit[DataSource]): T = diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/examples/IdiomaticApp.scala b/quill-jdbc-zio/src/test/scala/io/getquill/examples/IdiomaticApp.scala index 09287213e..284a89f2e 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/examples/IdiomaticApp.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/examples/IdiomaticApp.scala @@ -11,10 +11,11 @@ object IdiomaticApp extends ZIOAppDefault { override def run = (for { - joes <- Application.getPeopleByName("Joe") - _ <- printLine(joes) + joes <- Application.getPeopleByName("Joe") + _ <- printLine(joes) allPeople <- Application.getAllPeople() - _ <- printLine(allPeople) + _ <- printLine(allPeople) } yield ()) - .provide(applicationLive, dataServiceLive, dataSourceLive, postgresLive).exitCode + .provide(applicationLive, dataServiceLive, dataSourceLive, postgresLive) + .exitCode } diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/examples/IdiomaticAppData.scala b/quill-jdbc-zio/src/test/scala/io/getquill/examples/IdiomaticAppData.scala index 63624ed15..493f0b255 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/examples/IdiomaticAppData.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/examples/IdiomaticAppData.scala @@ -10,13 +10,13 @@ object IdiomaticAppData { case class DataService(quill: Quill.Postgres[Literal]) { import quill._ - val people = quote { query[Person] } - def peopleByName = quote { (name: String) => people.filter(p => p.name == name) } + val people = quote(query[Person]) + def peopleByName = quote((name: String) => people.filter(p => p.name == name)) } case class ApplicationLive(dataService: DataService) { import dataService.quill._ def getPeopleByName(name: String): ZIO[Any, SQLException, List[Person]] = run(dataService.peopleByName(lift(name))) - def getAllPeople(): ZIO[Any, SQLException, List[Person]] = run(dataService.people) + def getAllPeople(): ZIO[Any, SQLException, List[Person]] = run(dataService.people) } object Application { def getPeopleByName(name: String) = @@ -28,7 +28,7 @@ object IdiomaticAppData { object Layers { val dataServiceLive = ZLayer.fromFunction(DataService.apply _) val applicationLive = ZLayer.fromFunction(ApplicationLive.apply _) - val dataSourceLive = Quill.DataSource.fromPrefix("testPostgresDB") - val postgresLive = Quill.Postgres.fromNamingStrategy(Literal) + val dataSourceLive = Quill.DataSource.fromPrefix("testPostgresDB") + val postgresLive = Quill.Postgres.fromNamingStrategy(Literal) } -} \ No newline at end of file +} diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/examples/IdiomaticAppPlain.scala 
b/quill-jdbc-zio/src/test/scala/io/getquill/examples/IdiomaticAppPlain.scala index a9075ad41..06299dfee 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/examples/IdiomaticAppPlain.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/examples/IdiomaticAppPlain.scala @@ -12,14 +12,16 @@ object IdiomaticAppPlain { def main(args: Array[String]): Unit = { Unsafe.unsafe { implicit unsafe => - Runtime.default.unsafe.run( - (for { - joes <- Application.getPeopleByName("Joe") - _ <- printLine(joes) - allPeople <- Application.getAllPeople() - _ <- printLine(allPeople) - } yield ()).provide(applicationLive, dataServiceLive, dataSourceLive, postgresLive) - ).getOrThrow() + Runtime.default.unsafe + .run( + (for { + joes <- Application.getPeopleByName("Joe") + _ <- printLine(joes) + allPeople <- Application.getAllPeople() + _ <- printLine(allPeople) + } yield ()).provide(applicationLive, dataServiceLive, dataSourceLive, postgresLive) + ) + .getOrThrow() } () } diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/DataServiceLive.scala b/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/DataServiceLive.scala index 973b85a07..685f83905 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/DataServiceLive.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/DataServiceLive.scala @@ -7,11 +7,10 @@ import zio._ import javax.sql.DataSource /** - * Not sure why but if you put this into the same class as the caller (e.g. ZioAppExample) - * then dotty will throw: - * {{ - * DataService.getPeople.provide(Ctx.dataSourceLayer, (DataServiceLive.apply(_)).toLayer) - * }} + * Not sure why but if you put this into the same class as the caller (e.g. + * ZioAppExample) then dotty will throw: {{ + * DataService.getPeople.provide(Ctx.dataSourceLayer, + * (DataServiceLive.apply(_)).toLayer) }} */ object ZioAppExampleServices { object QuillContext extends PostgresZioJdbcContext(SnakeCase) { @@ -21,7 +20,8 @@ object ZioAppExampleServices { final case class DataServiceLive(dataSource: DataSource) { import QuillContext._ def getPeople = run(query[Person]).provideEnvironment(ZEnvironment(dataSource)) - def getPeopleOlderThan(age: Int) = run(query[Person].filter(p => p.age > lift(age))).provideEnvironment(ZEnvironment(dataSource)) + def getPeopleOlderThan(age: Int) = + run(query[Person].filter(p => p.age > lift(age))).provideEnvironment(ZEnvironment(dataSource)) } object DataService { @@ -34,4 +34,4 @@ object ZioAppExampleServices { object DataServiceLive { val layer = ZLayer.fromFunction(DataServiceLive.apply) } -} \ No newline at end of file +} diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/PlainApp.scala b/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/PlainApp.scala index f5889e45a..07fbd93d7 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/PlainApp.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/PlainApp.scala @@ -1,8 +1,8 @@ package io.getquill.examples.other -import io.getquill.{ Literal, PostgresZioJdbcContext } +import io.getquill.{Literal, PostgresZioJdbcContext} import io.getquill.context.ZioJdbc._ -import zio.{ Runtime, Unsafe } +import zio.{Runtime, Unsafe} import io.getquill._ import io.getquill.jdbczio.Quill @@ -20,7 +20,8 @@ object PlainApp { query[Person].filter(p => p.name == "Alex") } val qzio = - MyPostgresContext.run(people) + MyPostgresContext + .run(people) .tap(result => zio.ZIO.attempt(println(result.toString))) .provideLayer(zioDS) diff --git 
a/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/PlainAppDataSource.scala b/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/PlainAppDataSource.scala index 10e6de06f..01e1a02dc 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/PlainAppDataSource.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/PlainAppDataSource.scala @@ -3,7 +3,7 @@ package io.getquill.examples.other import com.zaxxer.hikari.HikariDataSource import io.getquill.context.ZioJdbc._ import io.getquill.util.LoadConfig -import io.getquill.{ JdbcContextConfig, Literal, PostgresZioJdbcContext } +import io.getquill.{JdbcContextConfig, Literal, PostgresZioJdbcContext} import zio.Console.printLine import zio.Runtime import io.getquill._ @@ -25,7 +25,8 @@ object PlainAppDataSource { query[Person].filter(p => p.name == "Alex") } val qzio = - MyPostgresContext.run(people) + MyPostgresContext + .run(people) .tap(result => printLine(result.toString)) .provide(zioDS) diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/PlainAppDataSource2.scala b/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/PlainAppDataSource2.scala index afe298d17..defb47f7d 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/PlainAppDataSource2.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/PlainAppDataSource2.scala @@ -1,10 +1,10 @@ package io.getquill.examples.other -import com.zaxxer.hikari.{ HikariConfig, HikariDataSource } +import com.zaxxer.hikari.{HikariConfig, HikariDataSource} import io.getquill.util.LoadConfig -import io.getquill.{ JdbcContextConfig, Literal, PostgresZioJdbcContext } +import io.getquill.{JdbcContextConfig, Literal, PostgresZioJdbcContext} import zio.Console.printLine -import zio.{ Runtime, Unsafe, Task, ZLayer } +import zio.{Runtime, Unsafe, Task, ZLayer} import javax.sql.DataSource import io.getquill._ import zio.ZIO @@ -16,7 +16,7 @@ object PlainAppDataSource2 { case class Person(name: String, age: Int) - def hikariConfig = new HikariConfig(JdbcContextConfig(LoadConfig("testPostgresDB")).configProperties) + def hikariConfig = new HikariConfig(JdbcContextConfig(LoadConfig("testPostgresDB")).configProperties) def hikariDataSource = new HikariDataSource(hikariConfig) val zioDS: ZLayer[Any, Throwable, DataSource] = @@ -27,7 +27,8 @@ object PlainAppDataSource2 { query[Person].filter(p => p.name == "Alex") } val qzio = - MyPostgresContext.run(people) + MyPostgresContext + .run(people) .tap(result => printLine(result.toString)) .provide(zioDS) diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/ServiceExample.scala b/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/ServiceExample.scala index 9b54e9212..46036a530 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/ServiceExample.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/ServiceExample.scala @@ -1,8 +1,8 @@ package io.getquill.examples.other import io.getquill.jdbczio.Quill -import io.getquill.{ Literal, PostgresZioJdbcContext } -import zio.{ ZIOAppDefault, ZIO, ZLayer } +import io.getquill.{Literal, PostgresZioJdbcContext} +import zio.{ZIOAppDefault, ZIO, ZLayer} import zio.Console._ import io.getquill._ @@ -12,16 +12,15 @@ import javax.sql.DataSource object ServiceExample extends ZIOAppDefault { import DBModel._ - override def run = { + override def run = runApp.provideLayer(DBManager.live).exitCode - } def runApp = for { - _ <- DBManager.deleteJoes - _ <- DBManager.persist(Person("Joe", 123)) + _ <- 
DBManager.deleteJoes + _ <- DBManager.persist(Person("Joe", 123)) joes <- DBManager.retrieveJoes - _ <- printLine(s"Joes: ${joes}") + _ <- printLine(s"Joes: ${joes}") } yield () } diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/ZioApp.scala b/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/ZioApp.scala index 69d7e0150..79750840e 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/ZioApp.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/ZioApp.scala @@ -18,7 +18,8 @@ object ZioApp extends ZIOAppDefault { val people = quote { query[Person].filter(p => p.name == "Alex") } - MyPostgresContext.run(people) + MyPostgresContext + .run(people) .tap(result => printLine(result.toString)) .provide(zioDS) .exitCode diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/ZioAppDataSource.scala b/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/ZioAppDataSource.scala index 46902c493..0b1023f67 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/ZioAppDataSource.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/ZioAppDataSource.scala @@ -3,7 +3,7 @@ package io.getquill.examples.other import io.getquill._ import io.getquill.util.LoadConfig import zio.Console.printLine -import zio.{ ZEnvironment, ZIOAppDefault } +import zio.{ZEnvironment, ZIOAppDefault} object ZioAppDataSource extends ZIOAppDefault { @@ -18,7 +18,8 @@ object ZioAppDataSource extends ZIOAppDefault { val people = quote { query[Person].filter(p => p.name == "Alex") } - MyPostgresContext.run(people) + MyPostgresContext + .run(people) .provideEnvironment(ZEnvironment(dataSource)) .tap(result => printLine(result.toString)) .exitCode diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/ZioAppImplicitEnv.scala b/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/ZioAppImplicitEnv.scala index bf7d9d1ad..f64fb5daf 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/ZioAppImplicitEnv.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/ZioAppImplicitEnv.scala @@ -19,16 +19,14 @@ object ZioAppImplicitEnv extends ZIOAppDefault { import Ctx._ given Implicit[DataSource] = Implicit(ds) - val joes = Ctx.run(query[Person].filter(p => p.name == "Joe")).implicitly - val jills = Ctx.run(query[Person].filter(p => p.name == "Jill")).implicitly + val joes = Ctx.run(query[Person].filter(p => p.name == "Joe")).implicitly + val jills = Ctx.run(query[Person].filter(p => p.name == "Jill")).implicitly val alexes = Ctx.run(query[Person].filter(p => p.name == "Alex")).implicitly - val janes = Ctx.stream(query[Person].filter(p => p.name == "Jane")).implicitly.runCollect + val janes = Ctx.stream(query[Person].filter(p => p.name == "Jane")).implicitly.runCollect } - override def run = { - MyQueryService(dataSource) - .joes + override def run = + MyQueryService(dataSource).joes .tap(result => printLine(result.toString)) .exitCode - } } diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/ZioAppManual.scala b/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/ZioAppManual.scala index 675cdf3be..fec1ab690 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/ZioAppManual.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/examples/other/ZioAppManual.scala @@ -2,7 +2,7 @@ package io.getquill.examples.other import io.getquill._ import io.getquill.util.LoadConfig -import zio.{ ZIOAppDefault, ZLayer } +import zio.{ZIOAppDefault, ZLayer} import 
zio.Console.printLine import javax.sql.DataSource @@ -19,7 +19,8 @@ object ZioAppManual extends ZIOAppDefault { val people = quote { query[Person].filter(p => p.name == "Alex") } - MyPostgresContext.run(people) + MyPostgresContext + .run(people) .tap(result => printLine(result.toString)) .provide(ZLayer.succeed(ds)) .exitCode diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/h2/PeopleZioJdbcSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/h2/PeopleZioJdbcSpec.scala index 3a4e0c34d..007bc1c56 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/h2/PeopleZioJdbcSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/h2/PeopleZioJdbcSpec.scala @@ -27,7 +27,9 @@ class PeopleZioJdbcSpec extends PeopleZioSpec { } "Example 2 - range simple" in { - testContext.run(`Ex 2 rangeSimple`(lift(`Ex 2 param 1`), lift(`Ex 2 param 2`))).runSyncUnsafe() should contain theSameElementsAs `Ex 2 expected result` + testContext + .run(`Ex 2 rangeSimple`(lift(`Ex 2 param 1`), lift(`Ex 2 param 2`))) + .runSyncUnsafe() should contain theSameElementsAs `Ex 2 expected result` } "Example 3 - satisfies" in { @@ -39,7 +41,9 @@ class PeopleZioJdbcSpec extends PeopleZioSpec { } "Example 5 - compose" in { - testContext.run(`Ex 5 compose`(lift(`Ex 5 param 1`), lift(`Ex 5 param 2`))).runSyncUnsafe() mustEqual `Ex 5 expected result` + testContext + .run(`Ex 5 compose`(lift(`Ex 5 param 1`), lift(`Ex 5 param 2`))) + .runSyncUnsafe() mustEqual `Ex 5 expected result` } "Example 6 - predicate 0" in { diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/h2/PeopleZioReturningSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/h2/PeopleZioReturningSpec.scala index c91c23034..7c35c70bc 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/h2/PeopleZioReturningSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/h2/PeopleZioReturningSpec.scala @@ -5,7 +5,6 @@ import io.getquill._ class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { - val context: testContext.type = testContext import testContext._ @@ -21,7 +20,7 @@ class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { "Ex 0 insert.returning(_.generatedColumn) mod" in { import `Ex 0 insert.returning(_.generatedColumn) mod`._ (for { - id <- testContext.run(op) + id <- testContext.run(op) output <- testContext.run(get) } yield (output.toSet mustEqual result(id).toSet)).runSyncUnsafe() } @@ -35,7 +34,7 @@ class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { "Ex 1 insert.returningMany(_.generatedColumn) mod" in { import `Ex 1 insert.returningMany(_.generatedColumn) mod`._ (for { - id <- testContext.run(op) + id <- testContext.run(op) output <- testContext.run(get) } yield (output mustEqual result(id.head))).runSyncUnsafe() } @@ -45,8 +44,8 @@ class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { import `Ex 2 update.returningMany(_.singleColumn) mod`._ (for { opResult <- testContext.run(op) - _ = opResult.toSet mustEqual expect.toSet - output <- testContext.run(get) + _ = opResult.toSet mustEqual expect.toSet + output <- testContext.run(get) } yield (output.toSet mustEqual result.toSet)).runSyncUnsafe() } @@ -61,8 +60,8 @@ class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { import `Ex 4 update.returningMany(query)`._ (for { opResult <- testContext.run(op) - _ = opResult.toSet mustEqual expect.toSet - output <- testContext.run(get) + _ = opResult.toSet mustEqual expect.toSet + output <- testContext.run(get) } yield (output.toSet mustEqual result.toSet)).runSyncUnsafe() } } 
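The returning specs above pin down ProtoQuill's returning verbs. As a usage sketch, assuming a Person table with a database-generated id column (the schema and the context wiring here are illustrative, not taken from the tests):

import io.getquill._
import io.getquill.jdbczio.Quill
import zio.ZIO
import java.sql.SQLException

case class Person(id: Long, name: String, age: Int)

case class ReturningSketch(ctx: Quill.H2[Literal]) {
  import ctx._

  // Insert one row and read back the database-generated id.
  def insertOne(p: Person): ZIO[Any, SQLException, Long] =
    run(query[Person].insertValue(lift(p)).returningGenerated(_.id))

  // Update many rows and return one column per affected row,
  // as exercised by the `update.returningMany(_.singleColumn)` cases above.
  def renameJoes: ZIO[Any, SQLException, List[String]] =
    run(query[Person].filter(p => p.name == "Joe").update(_.name -> "Joseph").returningMany(_.name))
}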
diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/h2/PrepareJdbcSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/h2/PrepareJdbcSpec.scala index 5a98db5c6..a8678475b 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/h2/PrepareJdbcSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/h2/PrepareJdbcSpec.scala @@ -2,7 +2,7 @@ package io.getquill.h2 import io.getquill.context.qzio.ImplicitSyntax.Implicit import javax.sql.DataSource -import java.sql.{ Connection, ResultSet } +import java.sql.{Connection, ResultSet} import io.getquill.PrepareZioJdbcSpecBase import org.scalatest.BeforeAndAfter @@ -10,7 +10,7 @@ import io.getquill._ class PrepareJdbcSpec extends PrepareZioJdbcSpecBase with BeforeAndAfter { - implicit val ds: Implicit[DataSource] = Implicit(pool) + implicit val ds: Implicit[DataSource] = Implicit(pool) val context: testContext.underlying.type = testContext.underlying import testContext.underlying._ diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/h2/ProductJdbcSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/h2/ProductJdbcSpec.scala index de28d12e6..f0eef632b 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/h2/ProductJdbcSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/h2/ProductJdbcSpec.scala @@ -20,7 +20,7 @@ class ProductJdbcSpec extends ProductSpec with ZioSpec { "Insert multiple products" in { val (inserted, product) = (for { - i <- testContext.run(liftQuery(productEntries).foreach(e => productInsert(e))) + i <- testContext.run(liftQuery(productEntries).foreach(e => productInsert(e))) ps <- testContext.run(productById(lift(i(2)))) } yield (i, ps.head)).runSyncUnsafe() @@ -31,7 +31,7 @@ class ProductJdbcSpec extends ProductSpec with ZioSpec { "Single insert product" in { val (inserted, product) = (for { - i <- testContext.run(productSingleInsert) + i <- testContext.run(productSingleInsert) ps <- testContext.run(productById(lift(i))) } yield (i, ps.head)).runSyncUnsafe() product.description mustEqual "Window" @@ -43,8 +43,8 @@ class ProductJdbcSpec extends ProductSpec with ZioSpec { val (inserted, returnedProduct) = (for { i <- testContext.run { - product.insert(_.sku -> lift(prd.sku), _.description -> lift(prd.description)).returningGenerated(_.id) - } + product.insert(_.sku -> lift(prd.sku), _.description -> lift(prd.description)).returningGenerated(_.id) + } rps <- testContext.run(productById(lift(i))) } yield (i, rps.head)).runSyncUnsafe() @@ -60,7 +60,7 @@ class ProductJdbcSpec extends ProductSpec with ZioSpec { } val (inserted, returnedProduct) = (for { - i <- testContext.run(q1) + i <- testContext.run(q1) rps <- testContext.run(productById(lift(i))) } yield (i, rps.head)).runSyncUnsafe() @@ -73,7 +73,7 @@ class ProductJdbcSpec extends ProductSpec with ZioSpec { val prd = Product(0L, "test3", 3L) val (inserted, returnedProduct) = (for { - i <- testContext.run(productInsert(lift(prd))) + i <- testContext.run(productInsert(lift(prd))) rps <- testContext.run(productById(lift(i))) } yield (i, rps.head)).runSyncUnsafe() diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/h2/ZioJdbcContextSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/h2/ZioJdbcContextSpec.scala index 7dd0f99d6..41c08a04a 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/h2/ZioJdbcContextSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/h2/ZioJdbcContextSpec.scala @@ -1,7 +1,7 @@ package io.getquill.h2 import io.getquill.ZioSpec -import zio.{ Task, ZIO } +import zio.{Task, ZIO} import io.getquill._ class ZioJdbcContextSpec extends ZioSpec { 
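The hunks below reformat this spec's transaction tests. The semantics those tests assert, as a compact sketch (testContext and qr1 are assumed from the test harness above): everything inside `transaction` runs on one connection and commits together, a failure anywhere inside rolls the whole block back, and a nested `transaction` joins the outer one rather than opening a new transaction.

val program =
  testContext.transaction {
    for {
      _  <- testContext.run(qr1.delete)
      _  <- testContext.run(qr1.insert(_.i -> 33))
      rs <- testContext.run(qr1)
    } yield rs.map(_.i) // List(33), committed only if every step succeeded
  }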
@@ -13,7 +13,7 @@ class ZioJdbcContextSpec extends ZioSpec { "success" in { (for { _ <- testContext.run(qr1.delete) - _ <- testContext.transaction { testContext.run(qr1.insert(_.i -> 33)) } + _ <- testContext.transaction(testContext.run(qr1.insert(_.i -> 33))) r <- testContext.run(qr1) } yield r).runSyncUnsafe().map(_.i) mustEqual List(33) } @@ -21,11 +21,11 @@ class ZioJdbcContextSpec extends ZioSpec { (for { _ <- testContext.run(qr1.delete) seq <- testContext.transaction { - for { - _ <- testContext.run(qr1.insert(_.i -> 33)) - s <- accumulate(testContext.stream(qr1)) - } yield s - } + for { + _ <- testContext.run(qr1.insert(_.i -> 33)) + s <- accumulate(testContext.stream(qr1)) + } yield s + } r <- testContext.run(qr1) } yield (seq.map(_.i), r.map(_.i))).runSyncUnsafe() mustEqual ((List(33), List(33))) } @@ -33,32 +33,37 @@ class ZioJdbcContextSpec extends ZioSpec { (for { _ <- testContext.run(qr1.delete) e <- testContext.transaction { - testContext.run(qr1.insert(_.i -> 36)) *> - testContext.transaction { - ZIO.collectAll(Seq( - testContext.run(qr1.insert(_.i -> 18)), - ZIO.attempt { - throw new IllegalStateException - } - )) - } - }.catchSome { - case e: Exception => ZIO.attempt(e.getClass.getSimpleName) - } + testContext.run(qr1.insert(_.i -> 36)) *> + testContext.transaction { + ZIO.collectAll( + Seq( + testContext.run(qr1.insert(_.i -> 18)), + ZIO.attempt { + throw new IllegalStateException + } + ) + ) + } + }.catchSome { case e: Exception => + ZIO.attempt(e.getClass.getSimpleName) + } r <- testContext.run(qr1) } yield (e, r.isEmpty)).runSyncUnsafe() mustEqual (("IllegalStateException", true)) } "nested" in { (for { _ <- testContext.run(qr1.delete) - _ <- testContext.transaction { testContext.transaction { testContext.run(qr1.insert(_.i -> 33)) } } + _ <- testContext.transaction(testContext.transaction(testContext.run(qr1.insert(_.i -> 33)))) r <- testContext.run(qr1) } yield r).runSyncUnsafe().map(_.i) mustEqual List(33) } "prepare" in { - testContext.prepareParams( - "select * from Person where name=? and age > ?", (ps, session) => (List("Sarah", 127), ps) - ).runSyncUnsafe() mustEqual List("127", "'Sarah'") + testContext + .prepareParams( + "select * from Person where name=? and age > ?", + (ps, session) => (List("Sarah", 127), ps) + ) + .runSyncUnsafe() mustEqual List("127", "'Sarah'") } } } diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/integration/StreamResultsOrBlowUpSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/integration/StreamResultsOrBlowUpSpec.scala index d61d491cf..92cfcd56e 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/integration/StreamResultsOrBlowUpSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/integration/StreamResultsOrBlowUpSpec.scala @@ -1,6 +1,6 @@ package io.getquill.integration -import java.sql.{ Connection, ResultSet } +import java.sql.{Connection, ResultSet} import org.scalatest.matchers.should.Matchers._ import io.getquill._ @@ -9,12 +9,13 @@ import io.getquill.context.qzio.ImplicitSyntax.Implicit import javax.sql.DataSource /** - * This is a long-running test that will cause a OutOfMemory exception if - * a ResultSet is not streamed correctly (e.g. if the ResultSet.TYPE_SCROLL_SENSITIVE option - * is used which will force most databases to put the entire ResultSet into memory). - * Run with -Xmx200m and doBlowUp=true to correctly reproduce the error. - * You can also use -Xmx100m but then it will blow up due to a GC Limit OutOfMemory as opposed - * to a heap space OutOfMemory. 
+ * This is a long-running test that will cause a OutOfMemory exception if a + * ResultSet is not streamed correctly (e.g. if the + * ResultSet.TYPE_SCROLL_SENSITIVE option is used which will force most + * databases to put the entire ResultSet into memory). Run with -Xmx200m and + * doBlowUp=true to correctly reproduce the error. You can also use -Xmx100m but + * then it will blow up due to a GC Limit OutOfMemory as opposed to a heap space + * OutOfMemory. * * As a default, this test will run as part of the suite without blowing up. */ @@ -40,12 +41,12 @@ class StreamResultsOrBlowUpSpec extends ZioProxySpec { stmt } } - import ctx.{ run => runQuill, _ } - val inserts = quote { - (numRows: Long) => - sql"""insert into person (name, age) select md5(random()::text), random()*10+1 from generate_series(1, ${numRows}) s(i)""".as[Insert[Int]] + import ctx.{run => runQuill, _} + val inserts = quote { (numRows: Long) => + sql"""insert into person (name, age) select md5(random()::text), random()*10+1 from generate_series(1, ${numRows}) s(i)""" + .as[Insert[Int]] } - val deletes = runQuill { sql"TRUNCATE TABLE Person".as[Delete[Person]] } + val deletes = runQuill(sql"TRUNCATE TABLE Person".as[Delete[Person]]) val numRows = 1000000L @@ -55,16 +56,15 @@ class StreamResultsOrBlowUpSpec extends ZioProxySpec { runQuill(inserts(lift(numRows))).onDataSource.runSyncUnsafe() // not sure why but foreachL causes a OutOfMemory exception anyhow, and firstL causes a ResultSet Closed exception - val result = stream(query[Person], 100) - .zipWithIndex - .runFold(0L)({ + val result = stream(query[Person], 100).zipWithIndex + .runFold(0L) { case (totalYears, (person, index)) => { // Need to print something out as we stream or github actions will think the build is stalled and kill it with the following message: // "No output has been received in the last 10m0s..." if (index % 10000 == 0) println(s"Streaming Test Row: ${index}") totalYears + person.age } - }) + } .onDataSource .runSyncUnsafe() @@ -80,16 +80,15 @@ class StreamResultsOrBlowUpSpec extends ZioProxySpec { runQuill(inserts(lift(numRows))).onDataSource.runSyncUnsafe() // not sure why but foreachL causes a OutOfMemory exception anyhow, and firstL causes a ResultSet Closed exception - val result = stream(query[Person], 100) - .zipWithIndex - .runFold(0L)({ + val result = stream(query[Person], 100).zipWithIndex + .runFold(0L) { case (totalYears, (person, index)) => { // Need to print something out as we stream or github actions will think the build is stalled and kill it with the following message: // "No output has been received in the last 10m0s..." 
if (index % 10000 == 0) println(s"Streaming Test Row: ${index}") totalYears + person.age } - }) + } .onDataSource .runSyncUnsafe() diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/misc/ImplicitEnvPatternSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/misc/ImplicitEnvPatternSpec.scala index 8e7fe8204..f2438cb7b 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/misc/ImplicitEnvPatternSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/misc/ImplicitEnvPatternSpec.scala @@ -1,6 +1,6 @@ package io.getquill.misc -import io.getquill.{ JdbcContextConfig, PeopleZioSpec } +import io.getquill.{JdbcContextConfig, PeopleZioSpec} import java.io.Closeable import javax.sql.DataSource @@ -32,8 +32,8 @@ class ImplicitEnvPatternSpec extends PeopleZioProxySpec { implicit val env: Implicit[DataSource] = Implicit(ds) def alexes = testContext.run(query[Person].filter(p => p.name == "Alex")) - def berts = testContext.run(query[Person].filter(p => p.name == "Bert")) - def coras = testContext.run(query[Person].filter(p => p.name == "Cora")) + def berts = testContext.run(query[Person].filter(p => p.name == "Bert")) + def coras = testContext.run(query[Person].filter(p => p.name == "Cora")) } def makeDataSource() = io.getquill.postgres.pool @@ -43,10 +43,10 @@ class ImplicitEnvPatternSpec extends PeopleZioProxySpec { ZIO.scoped { ZIO.attempt(makeDataSource()).flatMap { ds => for { - svc <- ZIO.attempt(MyService(ds)) + svc <- ZIO.attempt(MyService(ds)) alexes <- svc.alexes - berts <- svc.berts - coras <- svc.coras + berts <- svc.berts + coras <- svc.coras } yield (alexes, berts, coras) } }.runSyncUnsafe() diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/misc/OnDataSourceSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/misc/OnDataSourceSpec.scala index 8daf69673..3ef59cd2d 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/misc/OnDataSourceSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/misc/OnDataSourceSpec.scala @@ -3,7 +3,7 @@ package io.getquill.misc import io.getquill.PeopleZioSpec import org.scalatest.matchers.should.Matchers._ -import zio.{ ZIO, ZLayer } +import zio.{ZIO, ZLayer} import io.getquill.context.ZioJdbc._ import io.getquill._ @@ -31,13 +31,12 @@ class OnDataSourceSpec extends PeopleZioProxySpec { "should work with additional dependency" in { // This is how you import the decoders of `underlying` context without importing things that will conflict // i.e. the quote and run methods - import testContext.underlying.{ run => _, _ } + import testContext.underlying.{run => _, _} val people = (for { - n <- ZIO.service[String] + n <- ZIO.service[String] out <- testContext.underlying.run(query[Person].filter(p => p.name == lift(n))) - } yield out) - .onSomeDataSource + } yield out).onSomeDataSource .provideSomeLayer[DataSource](ZLayer.succeed("Alex")) .runSyncUnsafe() @@ -46,12 +45,11 @@ class OnDataSourceSpec extends PeopleZioProxySpec { "should work" in { // This is how you import the encoders/decoders of `underlying` context without importing things that will conflict // i.e. 
the quote and run methods - import testContext.underlying.{ prepare => _, run => _, _ } + import testContext.underlying.{prepare => _, run => _, _} val people = (for { out <- testContext.underlying.run(query[Person].filter(p => p.name == "Alex")) - } yield out) - .onDataSource + } yield out).onDataSource .runSyncUnsafe() people mustEqual peopleEntries.filter(p => p.name == "Alex") @@ -68,16 +66,15 @@ class OnDataSourceSpec extends PeopleZioProxySpec { implicit val dsi: Implicit[DataSource] = Implicit(ds) val people = (for { - n <- ZIO.service[String] + n <- ZIO.service[String] out <- testContext.run(query[Person].filter(p => p.name == lift(n))) - } yield out) - .implicitSomeDS + } yield out).implicitSomeDS .provide(ZLayer.succeed("Alex")) .runSyncUnsafe() } (for { - ds <- ZIO.service[DataSource] + ds <- ZIO.service[DataSource] svc <- ZIO.attempt(Service(ds)) } yield (svc.people)).runSyncUnsafe() mustEqual peopleEntries.filter(p => p.name == "Alex") } @@ -89,15 +86,14 @@ class OnDataSourceSpec extends PeopleZioProxySpec { val people = (for { out <- testContext.run(query[Person].filter(p => p.name == "Alex")) - } yield out) - .implicitDS + } yield out).implicitDS .runSyncUnsafe() } (for { - ds <- ZIO.service[DataSource] + ds <- ZIO.service[DataSource] svc <- ZIO.attempt(Service(ds)) } yield (svc.people)).runSyncUnsafe() mustEqual peopleEntries.filter(p => p.name == "Alex") } } -} \ No newline at end of file +} diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/misc/PrepareJdbcSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/misc/PrepareJdbcSpec.scala index 0043e054c..1b6f42b21 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/misc/PrepareJdbcSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/misc/PrepareJdbcSpec.scala @@ -1,10 +1,10 @@ package io.getquill.misc -import io.getquill.{ PrepareZioJdbcSpecBase, ZioSpec } +import io.getquill.{PrepareZioJdbcSpecBase, ZioSpec} import org.scalatest.BeforeAndAfter import io.getquill._ -import java.sql.{ Connection, ResultSet } +import java.sql.{Connection, ResultSet} class PrepareJdbcSpec extends PrepareZioJdbcSpecBase with ZioProxySpec with BeforeAndAfter { diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/misc/StreamingWithFetchSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/misc/StreamingWithFetchSpec.scala index 6256b3c2c..2eabbc540 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/misc/StreamingWithFetchSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/misc/StreamingWithFetchSpec.scala @@ -15,7 +15,7 @@ class StreamingWithFetchSpec extends ZioProxySpec with BeforeAndAfter { case class Person(name: String, age: Int) inline def selectAll = quote(query[Person]) - inline def insert = quote { (p: Person) => query[Person].insertValue(p) } + inline def insert = quote((p: Person) => query[Person].insertValue(p)) def result[T](qzio: QIO[T]): T = Unsafe.unsafe { implicit unsafe => diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/misc/ZioJdbcUnderlyingContextSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/misc/ZioJdbcUnderlyingContextSpec.scala index 412cb487d..226e26de1 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/misc/ZioJdbcUnderlyingContextSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/misc/ZioJdbcUnderlyingContextSpec.scala @@ -2,7 +2,7 @@ package io.getquill.misc import io.getquill.context.ZioJdbc._ import io.getquill.ZioSpec -import zio.{ Task, ZIO, ZLayer } +import zio.{Task, ZIO, ZLayer} import io.getquill._ import javax.sql.DataSource @@ -21,7 +21,7 @@ class 
ZioJdbcUnderlyingContextSpec extends ZioProxySpec { "success" in { (for { _ <- testContext.underlying.run(qr1.delete) - _ <- testContext.underlying.transaction { testContext.underlying.run(qr1.insert(_.i -> 33)) } + _ <- testContext.underlying.transaction(testContext.underlying.run(qr1.insert(_.i -> 33))) r <- testContext.underlying.run(qr1) } yield r).onDataSource.runSyncUnsafe().map(_.i) mustEqual List(33) } @@ -29,11 +29,11 @@ class ZioJdbcUnderlyingContextSpec extends ZioProxySpec { (for { _ <- testContext.underlying.run(qr1.delete) _ <- testContext.underlying.transaction { - for { - env <- ZIO.service[Int] - qry <- testContext.underlying.run(qr1.insert(_.i -> lift(env))) - } yield qry - } + for { + env <- ZIO.service[Int] + qry <- testContext.underlying.run(qr1.insert(_.i -> lift(env))) + } yield qry + } r <- testContext.underlying.run(qr1) } yield r).onSomeDataSource.provideSomeLayer(ZLayer.succeed(33)).runSyncUnsafe().map(_.i) mustEqual List(33) } @@ -41,11 +41,11 @@ class ZioJdbcUnderlyingContextSpec extends ZioProxySpec { (for { _ <- testContext.underlying.run(qr1.delete) seq <- testContext.underlying.transaction { - for { - _ <- testContext.underlying.run(qr1.insert(_.i -> 33)) - s <- accumulate(testContext.underlying.stream(qr1)) - } yield s - } + for { + _ <- testContext.underlying.run(qr1.insert(_.i -> 33)) + s <- accumulate(testContext.underlying.stream(qr1)) + } yield s + } r <- testContext.underlying.run(qr1) } yield (seq.map(_.i), r.map(_.i))).onDataSource.runSyncUnsafe() mustEqual ((List(33), List(33))) } @@ -53,25 +53,29 @@ class ZioJdbcUnderlyingContextSpec extends ZioProxySpec { (for { _ <- testContext.underlying.run(qr1.delete) e <- testContext.underlying.transaction { - testContext.underlying.run(qr1.insert(_.i -> 36)) *> - testContext.underlying.transaction { - ZIO.collectAll(Seq( - testContext.underlying.run(qr1.insert(_.i -> 18)), - ZIO.attempt { - throw new IllegalStateException - } - )) - } - }.catchSome { - case e: Exception => ZIO.attempt(e.getClass.getSimpleName) - } + testContext.underlying.run(qr1.insert(_.i -> 36)) *> + testContext.underlying.transaction { + ZIO.collectAll( + Seq( + testContext.underlying.run(qr1.insert(_.i -> 18)), + ZIO.attempt { + throw new IllegalStateException + } + ) + ) + } + }.catchSome { case e: Exception => + ZIO.attempt(e.getClass.getSimpleName) + } r <- testContext.underlying.run(qr1) } yield (e, r.isEmpty)).onDataSource.runSyncUnsafe() mustEqual (("IllegalStateException", true)) } "nested" in { (for { _ <- testContext.underlying.run(qr1.delete) - _ <- testContext.underlying.transaction { testContext.underlying.transaction { testContext.underlying.run(qr1.insert(_.i -> 33)) } } + _ <- testContext.underlying.transaction { + testContext.underlying.transaction(testContext.underlying.run(qr1.insert(_.i -> 33))) + } r <- testContext.underlying.run(qr1) } yield r).onDataSource.runSyncUnsafe().map(_.i) mustEqual List(33) } diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/mysql/PeopleZioJdbcSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/mysql/PeopleZioJdbcSpec.scala index 14469616d..99a4e4c86 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/mysql/PeopleZioJdbcSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/mysql/PeopleZioJdbcSpec.scala @@ -26,7 +26,9 @@ class PeopleZioJdbcSpec extends PeopleZioSpec { } "Example 2 - range simple" in { - testContext.run(`Ex 2 rangeSimple`(lift(`Ex 2 param 1`), lift(`Ex 2 param 2`))).runSyncUnsafe() should contain theSameElementsAs `Ex 2 expected result` + testContext + 
.run(`Ex 2 rangeSimple`(lift(`Ex 2 param 1`), lift(`Ex 2 param 2`))) + .runSyncUnsafe() should contain theSameElementsAs `Ex 2 expected result` } "Example 3 - satisfies" in { @@ -38,7 +40,9 @@ class PeopleZioJdbcSpec extends PeopleZioSpec { } "Example 5 - compose" in { - testContext.run(`Ex 5 compose`(lift(`Ex 5 param 1`), lift(`Ex 5 param 2`))).runSyncUnsafe() mustEqual `Ex 5 expected result` + testContext + .run(`Ex 5 compose`(lift(`Ex 5 param 1`), lift(`Ex 5 param 2`))) + .runSyncUnsafe() mustEqual `Ex 5 expected result` } "Example 6 - predicate 0" in { diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/mysql/PeopleZioReturningSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/mysql/PeopleZioReturningSpec.scala index 068a8011e..caaabe9c4 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/mysql/PeopleZioReturningSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/mysql/PeopleZioReturningSpec.scala @@ -5,7 +5,6 @@ import io.getquill._ class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { - val context: testContext.type = testContext import testContext._ @@ -21,7 +20,7 @@ class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { "Ex 0 insert.returning(_.generatedColumn) mod" in { import `Ex 0 insert.returning(_.generatedColumn) mod`._ (for { - id <- testContext.run(op) + id <- testContext.run(op) output <- testContext.run(get) } yield (output.toSet mustEqual result(id).toSet)).runSyncUnsafe() } @@ -35,7 +34,7 @@ class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { "Ex 1 insert.returningMany(_.generatedColumn) mod" in { import `Ex 1 insert.returningMany(_.generatedColumn) mod`._ (for { - id <- testContext.run(op) + id <- testContext.run(op) output <- testContext.run(get) } yield (output mustEqual result(id.head))).runSyncUnsafe() } @@ -45,8 +44,8 @@ class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { import `Ex 2 update.returningMany(_.singleColumn) mod`._ (for { opResult <- testContext.run(op) - _ = opResult.toSet mustEqual expect.toSet - output <- testContext.run(get) + _ = opResult.toSet mustEqual expect.toSet + output <- testContext.run(get) } yield (output.toSet mustEqual result.toSet)).runSyncUnsafe() } @@ -61,8 +60,8 @@ class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { import `Ex 4 update.returningMany(query)`._ (for { opResult <- testContext.run(op) - _ = opResult.toSet mustEqual expect.toSet - output <- testContext.run(get) + _ = opResult.toSet mustEqual expect.toSet + output <- testContext.run(get) } yield (output.toSet mustEqual result.toSet)).runSyncUnsafe() } } diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/mysql/PrepareJdbcSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/mysql/PrepareJdbcSpec.scala index 0795a4b43..2c571d948 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/mysql/PrepareJdbcSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/mysql/PrepareJdbcSpec.scala @@ -1,6 +1,6 @@ package io.getquill.mysql -import java.sql.{ Connection, ResultSet } +import java.sql.{Connection, ResultSet} import javax.sql.DataSource import io.getquill.PrepareZioJdbcSpecBase import io.getquill.context.qzio.ImplicitSyntax.Implicit @@ -10,7 +10,7 @@ import io.getquill._ class PrepareJdbcSpec extends PrepareZioJdbcSpecBase with BeforeAndAfter { - implicit val ds: Implicit[DataSource] = Implicit(pool) + implicit val ds: Implicit[DataSource] = Implicit(pool) val context: testContext.underlying.type = testContext.underlying import testContext.underlying._ diff 
--git a/quill-jdbc-zio/src/test/scala/io/getquill/mysql/ProductJdbcSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/mysql/ProductJdbcSpec.scala index 65a470cc6..9f396f114 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/mysql/ProductJdbcSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/mysql/ProductJdbcSpec.scala @@ -20,7 +20,7 @@ class ProductJdbcSpec extends ProductSpec with ZioSpec { "Insert multiple products" in { val (inserted, product) = (for { - i <- testContext.run(liftQuery(productEntries).foreach(e => productInsert(e))) + i <- testContext.run(liftQuery(productEntries).foreach(e => productInsert(e))) ps <- testContext.run(productById(lift(i(2)))) } yield (i, ps.head)).runSyncUnsafe() @@ -31,7 +31,7 @@ class ProductJdbcSpec extends ProductSpec with ZioSpec { "Single insert product" in { val (inserted, product) = (for { - i <- testContext.run(productSingleInsert) + i <- testContext.run(productSingleInsert) ps <- testContext.run(productById(lift(i))) } yield (i, ps.head)).runSyncUnsafe() product.description mustEqual "Window" @@ -43,8 +43,8 @@ class ProductJdbcSpec extends ProductSpec with ZioSpec { val (inserted, returnedProduct) = (for { i <- testContext.run { - product.insert(_.sku -> lift(prd.sku), _.description -> lift(prd.description)).returningGenerated(_.id) - } + product.insert(_.sku -> lift(prd.sku), _.description -> lift(prd.description)).returningGenerated(_.id) + } rps <- testContext.run(productById(lift(i))) } yield (i, rps.head)).runSyncUnsafe() @@ -60,7 +60,7 @@ class ProductJdbcSpec extends ProductSpec with ZioSpec { } val (inserted, returnedProduct) = (for { - i <- testContext.run(q1) + i <- testContext.run(q1) rps <- testContext.run(productById(lift(i))) } yield (i, rps.head)).runSyncUnsafe() @@ -73,7 +73,7 @@ class ProductJdbcSpec extends ProductSpec with ZioSpec { val prd = Product(0L, "test3", 3L) val (inserted, returnedProduct) = (for { - i <- testContext.run(productInsert(lift(prd))) + i <- testContext.run(productInsert(lift(prd))) rps <- testContext.run(productById(lift(i))) } yield (i, rps.head)).runSyncUnsafe() diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/mysql/ZioJdbcContextSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/mysql/ZioJdbcContextSpec.scala index cfcd1533a..d6b64cfbb 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/mysql/ZioJdbcContextSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/mysql/ZioJdbcContextSpec.scala @@ -1,7 +1,7 @@ package io.getquill.mysql import io.getquill.ZioSpec -import zio.{ Task, ZIO, ZLayer } +import zio.{Task, ZIO, ZLayer} import io.getquill.context.ZioJdbc._ import javax.sql.DataSource @@ -16,7 +16,7 @@ class ZioJdbcContextSpec extends ZioSpec { "success" in { (for { _ <- testContext.run(qr1.delete) - _ <- testContext.transaction { testContext.run(qr1.insert(_.i -> 33)) } + _ <- testContext.transaction(testContext.run(qr1.insert(_.i -> 33))) r <- testContext.run(qr1) } yield r).runSyncUnsafe().map(_.i) mustEqual List(33) } @@ -24,11 +24,11 @@ class ZioJdbcContextSpec extends ZioSpec { (for { _ <- testContext.run(qr1.delete) _ <- testContext.transaction { - for { - env <- ZIO.service[Int] - qry <- testContext.run(qr1.insert(_.i -> lift(env))) - } yield qry - } + for { + env <- ZIO.service[Int] + qry <- testContext.run(qr1.insert(_.i -> lift(env))) + } yield qry + } r <- testContext.run(qr1) } yield r).provideSomeLayer(ZLayer.succeed(33)).runSyncUnsafe().map(_.i) mustEqual List(33) } @@ -36,11 +36,11 @@ class ZioJdbcContextSpec extends ZioSpec { (for { _ <- 
testContext.run(qr1.delete) seq <- testContext.transaction { - for { - _ <- testContext.run(qr1.insert(_.i -> 33)) - s <- accumulate(testContext.stream(qr1)) - } yield s - } + for { + _ <- testContext.run(qr1.insert(_.i -> 33)) + s <- accumulate(testContext.stream(qr1)) + } yield s + } r <- testContext.run(qr1) } yield (seq.map(_.i), r.map(_.i))).runSyncUnsafe() mustEqual ((List(33), List(33))) } @@ -48,32 +48,37 @@ class ZioJdbcContextSpec extends ZioSpec { (for { _ <- testContext.run(qr1.delete) e <- testContext.transaction { - testContext.run(qr1.insert(_.i -> 36)) *> - testContext.transaction { - ZIO.collectAll(Seq( - testContext.run(qr1.insert(_.i -> 18)), - ZIO.attempt { - throw new IllegalStateException - } - )) - } - }.catchSome { - case e: Exception => ZIO.attempt(e.getClass.getSimpleName) - } + testContext.run(qr1.insert(_.i -> 36)) *> + testContext.transaction { + ZIO.collectAll( + Seq( + testContext.run(qr1.insert(_.i -> 18)), + ZIO.attempt { + throw new IllegalStateException + } + ) + ) + } + }.catchSome { case e: Exception => + ZIO.attempt(e.getClass.getSimpleName) + } r <- testContext.run(qr1) } yield (e, r.isEmpty)).runSyncUnsafe() mustEqual (("IllegalStateException", true)) } "nested" in { (for { _ <- testContext.run(qr1.delete) - _ <- testContext.transaction { testContext.transaction { testContext.run(qr1.insert(_.i -> 33)) } } + _ <- testContext.transaction(testContext.transaction(testContext.run(qr1.insert(_.i -> 33)))) r <- testContext.run(qr1) } yield r).runSyncUnsafe().map(_.i) mustEqual List(33) } "prepare" in { - testContext.prepareParams( - "select * from Person where name=? and age > ?", (ps, session) => (List("Sarah", 127), ps) - ).runSyncUnsafe() mustEqual List("127", "'Sarah'") + testContext + .prepareParams( + "select * from Person where name=? 
and age > ?", + (ps, session) => (List("Sarah", 127), ps) + ) + .runSyncUnsafe() mustEqual List("127", "'Sarah'") } } } diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/oracle/PeopleZioJdbcSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/oracle/PeopleZioJdbcSpec.scala index 6b3d342fb..8dfc58f3b 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/oracle/PeopleZioJdbcSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/oracle/PeopleZioJdbcSpec.scala @@ -27,7 +27,9 @@ class PeopleZioJdbcSpec extends PeopleZioSpec { } "Example 2 - range simple" in { - testContext.run(`Ex 2 rangeSimple`(lift(`Ex 2 param 1`), lift(`Ex 2 param 2`))).runSyncUnsafe() should contain theSameElementsAs `Ex 2 expected result` + testContext + .run(`Ex 2 rangeSimple`(lift(`Ex 2 param 1`), lift(`Ex 2 param 2`))) + .runSyncUnsafe() should contain theSameElementsAs `Ex 2 expected result` } "Example 3 - satisfies" in { @@ -39,7 +41,9 @@ class PeopleZioJdbcSpec extends PeopleZioSpec { } "Example 5 - compose" in { - testContext.run(`Ex 5 compose`(lift(`Ex 5 param 1`), lift(`Ex 5 param 2`))).runSyncUnsafe() mustEqual `Ex 5 expected result` + testContext + .run(`Ex 5 compose`(lift(`Ex 5 param 1`), lift(`Ex 5 param 2`))) + .runSyncUnsafe() mustEqual `Ex 5 expected result` } "Example 6 - predicate 0" in { diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/oracle/PeopleZioReturningSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/oracle/PeopleZioReturningSpec.scala index 28570c17d..912018ccb 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/oracle/PeopleZioReturningSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/oracle/PeopleZioReturningSpec.scala @@ -5,7 +5,6 @@ import io.getquill._ class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { - val context: testContext.type = testContext import testContext._ @@ -21,7 +20,7 @@ class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { "Ex 0 insert.returning(_.generatedColumn) mod" in { import `Ex 0 insert.returning(_.generatedColumn) mod`._ (for { - id <- testContext.run(op) + id <- testContext.run(op) output <- testContext.run(get) } yield (output.toSet mustEqual result(id).toSet)).runSyncUnsafe() } @@ -30,14 +29,14 @@ class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { import `Ex 0.5 insert.returning(wholeRecord) mod`._ (for { product <- testContext.run(op) - output <- testContext.run(get) + output <- testContext.run(get) } yield (output mustEqual result(product))).runSyncUnsafe() } "Ex 1 insert.returningMany(_.generatedColumn) mod" in { import `Ex 1 insert.returningMany(_.generatedColumn) mod`._ (for { - id <- testContext.run(op) + id <- testContext.run(op) output <- testContext.run(get) } yield (output mustEqual result(id.head))).runSyncUnsafe() } @@ -47,8 +46,8 @@ class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { import `Ex 2 update.returningMany(_.singleColumn) mod`._ (for { opResult <- testContext.run(op) - _ = opResult.toSet mustEqual expect.toSet - output <- testContext.run(get) + _ = opResult.toSet mustEqual expect.toSet + output <- testContext.run(get) } yield (output.toSet mustEqual result.toSet)).runSyncUnsafe() } @@ -57,8 +56,8 @@ class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { import `Ex 3 delete.returningMany(wholeRecord)`._ (for { opResult <- testContext.run(op) - _ = opResult.toSet mustEqual expect.toSet - output <- testContext.run(get) + _ = opResult.toSet mustEqual expect.toSet + output <- testContext.run(get) } yield 
(output.toSet mustEqual result.toSet)).runSyncUnsafe() } @@ -67,8 +66,8 @@ class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { import `Ex 4 update.returningMany(query)`._ (for { opResult <- testContext.run(op) - _ = opResult.toSet mustEqual expect.toSet - output <- testContext.run(get) + _ = opResult.toSet mustEqual expect.toSet + output <- testContext.run(get) } yield (output.toSet mustEqual result.toSet)).runSyncUnsafe() } } diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/oracle/PrepareJdbcSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/oracle/PrepareJdbcSpec.scala index f1a5ae0a1..4244f8c5e 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/oracle/PrepareJdbcSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/oracle/PrepareJdbcSpec.scala @@ -10,7 +10,7 @@ import io.getquill._ class PrepareJdbcSpec extends PrepareZioJdbcSpecBase with BeforeAndAfter { - implicit val ds: Implicit[DataSource] = Implicit(pool) + implicit val ds: Implicit[DataSource] = Implicit(pool) val context: testContext.underlying.type = testContext.underlying import testContext.underlying._ diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/oracle/ProductJdbcSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/oracle/ProductJdbcSpec.scala index afd1d5eb3..62089b413 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/oracle/ProductJdbcSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/oracle/ProductJdbcSpec.scala @@ -21,7 +21,7 @@ class ProductJdbcSpec extends ProductSpec with ZioSpec { "Insert multiple products" in { val (inserted, product) = (for { - i <- ZIO.collectAll(productEntries.map(product => testContext.run(productInsert(lift(product))))) + i <- ZIO.collectAll(productEntries.map(product => testContext.run(productInsert(lift(product))))) ps <- testContext.run(productById(lift(i(2)))) } yield (i, ps.head)).runSyncUnsafe() @@ -32,7 +32,7 @@ class ProductJdbcSpec extends ProductSpec with ZioSpec { "Single insert product" in { val (inserted, product) = (for { - i <- testContext.run(productSingleInsert) + i <- testContext.run(productSingleInsert) ps <- testContext.run(productById(lift(i))) } yield (i, ps.head)).runSyncUnsafe() product.description mustEqual "Window" @@ -44,8 +44,8 @@ class ProductJdbcSpec extends ProductSpec with ZioSpec { val (inserted, returnedProduct) = (for { i <- testContext.run { - product.insert(_.sku -> lift(prd.sku), _.description -> lift(prd.description)).returning(_.id) - } + product.insert(_.sku -> lift(prd.sku), _.description -> lift(prd.description)).returning(_.id) + } rps <- testContext.run(productById(lift(i))) } yield (i, rps.head)).runSyncUnsafe() @@ -61,7 +61,7 @@ class ProductJdbcSpec extends ProductSpec with ZioSpec { } val (inserted, returnedProduct) = (for { - i <- testContext.run(q1) + i <- testContext.run(q1) rps <- testContext.run(productById(lift(i))) } yield (i, rps.head)).runSyncUnsafe() @@ -74,7 +74,7 @@ class ProductJdbcSpec extends ProductSpec with ZioSpec { val prd = Product(0L, "test3", 3L) val (inserted, returnedProduct) = (for { - i <- testContext.run(productInsert(lift(prd))) + i <- testContext.run(productInsert(lift(prd))) rps <- testContext.run(productById(lift(i))) } yield (i, rps.head)).runSyncUnsafe() diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/oracle/ZioJdbcContextSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/oracle/ZioJdbcContextSpec.scala index 0ce0bf892..bc78c8953 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/oracle/ZioJdbcContextSpec.scala +++ 
b/quill-jdbc-zio/src/test/scala/io/getquill/oracle/ZioJdbcContextSpec.scala @@ -1,7 +1,7 @@ package io.getquill.oracle import io.getquill.ZioSpec -import zio.{ Task, ZIO } +import zio.{Task, ZIO} import javax.sql.DataSource import io.getquill._ import zio.ZLayer @@ -15,7 +15,7 @@ class ZioJdbcContextSpec extends ZioSpec { "success" in { (for { _ <- testContext.run(qr1.delete) - _ <- testContext.transaction { testContext.run(qr1.insert(_.i -> 33)) } + _ <- testContext.transaction(testContext.run(qr1.insert(_.i -> 33))) r <- testContext.run(qr1) } yield r).runSyncUnsafe().map(_.i) mustEqual List(33) } @@ -23,11 +23,11 @@ class ZioJdbcContextSpec extends ZioSpec { (for { _ <- testContext.run(qr1.delete) _ <- testContext.transaction { - for { - env <- ZIO.service[Int] - qry <- testContext.run(qr1.insert(_.i -> lift(env))) - } yield qry - } + for { + env <- ZIO.service[Int] + qry <- testContext.run(qr1.insert(_.i -> lift(env))) + } yield qry + } r <- testContext.run(qr1) } yield r).provideSomeLayer(ZLayer.succeed(33)).runSyncUnsafe().map(_.i) mustEqual List(33) } @@ -35,11 +35,11 @@ class ZioJdbcContextSpec extends ZioSpec { (for { _ <- testContext.run(qr1.delete) seq <- testContext.transaction { - for { - _ <- testContext.run(qr1.insert(_.i -> 33)) - s <- accumulate(testContext.stream(qr1)) - } yield s - } + for { + _ <- testContext.run(qr1.insert(_.i -> 33)) + s <- accumulate(testContext.stream(qr1)) + } yield s + } r <- testContext.run(qr1) } yield (seq.map(_.i), r.map(_.i))).runSyncUnsafe() mustEqual ((List(33), List(33))) } @@ -47,32 +47,37 @@ class ZioJdbcContextSpec extends ZioSpec { (for { _ <- testContext.run(qr1.delete) e <- testContext.transaction { - testContext.run(qr1.insert(_.i -> 36)) *> - testContext.transaction { - ZIO.collectAll(Seq( - testContext.run(qr1.insert(_.i -> 18)), - ZIO.attempt { - throw new IllegalStateException - } - )) - } - }.catchSome { - case e: Exception => ZIO.attempt(e.getClass.getSimpleName) - } + testContext.run(qr1.insert(_.i -> 36)) *> + testContext.transaction { + ZIO.collectAll( + Seq( + testContext.run(qr1.insert(_.i -> 18)), + ZIO.attempt { + throw new IllegalStateException + } + ) + ) + } + }.catchSome { case e: Exception => + ZIO.attempt(e.getClass.getSimpleName) + } r <- testContext.run(qr1) } yield (e, r.isEmpty)).runSyncUnsafe() mustEqual (("IllegalStateException", true)) } "nested" in { (for { _ <- testContext.run(qr1.delete) - _ <- testContext.transaction { testContext.transaction { testContext.run(qr1.insert(_.i -> 33)) } } + _ <- testContext.transaction(testContext.transaction(testContext.run(qr1.insert(_.i -> 33)))) r <- testContext.run(qr1) } yield r).runSyncUnsafe().map(_.i) mustEqual List(33) } "prepare" in { - testContext.prepareParams( - "select * from Person where name=? and age > ?", (ps, session) => (List("Sarah", 127), ps) - ).runSyncUnsafe() mustEqual List("127", "'Sarah'") + testContext + .prepareParams( + "select * from Person where name=? 
and age > ?", + (ps, session) => (List("Sarah", 127), ps) + ) + .runSyncUnsafe() mustEqual List("127", "'Sarah'") } } } diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/postgres/ConnectionLeakTest.scala b/quill-jdbc-zio/src/test/scala/io/getquill/postgres/ConnectionLeakTest.scala index a69364fdf..e30b7bcf1 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/postgres/ConnectionLeakTest.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/postgres/ConnectionLeakTest.scala @@ -6,7 +6,7 @@ import io.getquill.context.sql.ProductSpec import io.getquill.util.LoadConfig import io.getquill.context.ZioJdbc._ import io.getquill.context.qzio.ImplicitSyntax.Implicit -import zio.{ Runtime, Unsafe } +import zio.{Runtime, Unsafe} import io.getquill.jdbczio.Quill import scala.util.Random @@ -15,7 +15,9 @@ import javax.sql.DataSource class ConnectionLeakTest extends ProductSpec with ZioSpec { - implicit val pool: Implicit[ZLayer[Any, Throwable, DataSource]] = Implicit(Quill.DataSource.fromPrefix("testPostgresLeakDB")) + implicit val pool: Implicit[ZLayer[Any, Throwable, DataSource]] = Implicit( + Quill.DataSource.fromPrefix("testPostgresLeakDB") + ) // Only used for connection-amount checking val dataSource = JdbcContextConfig(LoadConfig("testPostgresLeakDB")).dataSource @@ -32,24 +34,27 @@ class ConnectionLeakTest extends ProductSpec with ZioSpec { "insert and select without leaking" in { val result = Unsafe.unsafe { implicit unsafe => - Runtime.default.unsafe.run(context.underlying.transaction { - import context.underlying._ - for { - _ <- context.underlying.run { - quote { - query[Product].insertValue( - lift(Product(1, UUID.randomUUID().toString, Random.nextLong())) - ) - } + Runtime.default.unsafe + .run( + context.underlying.transaction { + import context.underlying._ + for { + _ <- context.underlying.run { + quote { + query[Product].insertValue( + lift(Product(1, UUID.randomUUID().toString, Random.nextLong())) + ) + } + } + result <- context.underlying.run { + query[Product].filter(p => query[Product].map(_.id).max.exists(_ == p.id)) + } + } yield (result) } - result <- context.underlying.run { - query[Product].filter(p => query[Product].map(_.id).max.exists(_ == p.id)) - } - } yield (result) - } - .map(_.headOption.map(_.id)) - .onDataSource - .provide(pool.env)) + .map(_.headOption.map(_.id)) + .onDataSource + .provide(pool.env) + ) .getOrThrow() } diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/postgres/MultiLevelServiceSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/postgres/MultiLevelServiceSpec.scala index 61f5bdbdb..e3adbf241 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/postgres/MultiLevelServiceSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/postgres/MultiLevelServiceSpec.scala @@ -2,7 +2,7 @@ package io.getquill.postgres import io.getquill.jdbczio.Quill import io.getquill._ -import zio.{ Unsafe, ZIO, ZLayer } +import zio.{Unsafe, ZIO, ZLayer} import java.sql.SQLException import javax.sql.DataSource @@ -24,69 +24,75 @@ class MultiLevelServiceSpec extends AnyFreeSpec with BeforeAndAfterAll with Matc val testContext = new Quill.Postgres(Literal, io.getquill.postgres.pool) import testContext._ Unsafe.unsafe { implicit unsafe => - zio.Runtime.default.unsafe.run( - testContext.transaction { - for { - _ <- testContext.run(query[Person].delete) - _ <- testContext.run(liftQuery(entries).foreach(p => query[Person].insertValue(p))) - } yield () - } - ).getOrThrow() + zio.Runtime.default.unsafe + .run( + testContext.transaction { + for { + _ <- 
testContext.run(query[Person].delete) + _ <- testContext.run(liftQuery(entries).foreach(p => query[Person].insertValue(p))) + } yield () + } + ) + .getOrThrow() } } case class DataService(quill: Quill[PostgresDialect, Literal]) { - import quill.{ run => qrun, _ } - inline def people = quote { query[Person] } - inline def somePeopleByName = quote { (ps: Query[Person], name: String) => ps.filter(p => p.name == name) } + import quill.{run => qrun, _} + inline def people = quote(query[Person]) + inline def somePeopleByName = quote((ps: Query[Person], name: String) => ps.filter(p => p.name == name)) inline def peopleByNameNative(inline name: String) = people.filter(p => p.name == name) - //inline def peopleByNameNative2(inline name: String) = quote { people.filter(p => p.name == name) } - inline def peopleByName = quote { (name: String) => people.filter(p => p.name == name) } - def getAllPeople(): ZIO[Any, SQLException, List[Person]] = qrun(people) + // inline def peopleByNameNative2(inline name: String) = quote { people.filter(p => p.name == name) } + inline def peopleByName = quote((name: String) => people.filter(p => p.name == name)) + def getAllPeople(): ZIO[Any, SQLException, List[Person]] = qrun(people) def getPeopleByName(name: String): ZIO[Any, SQLException, List[Person]] = qrun(peopleByName(lift(name))) } case class ApplicationLive(dataService: DataService) { import dataService._ - import dataService.quill.{ run => qrun, _ } + import dataService.quill.{run => qrun, _} - inline def joes = quote { peopleByName("Joe") } + inline def joes = quote(peopleByName("Joe")) def getJoes: ZIO[Any, SQLException, List[Person]] = qrun(joes) - def getPeopleByName3(name: String): ZIO[Any, SQLException, List[Person]] = qrun(somePeopleByName(query[Person], lift(name))) + def getPeopleByName3(name: String): ZIO[Any, SQLException, List[Person]] = qrun( + somePeopleByName(query[Person], lift(name)) + ) def getPeopleByName2A(name: String): ZIO[Any, SQLException, List[Person]] = qrun(peopleByNameNative(lift(name))) - def getPeopleByName2(name: String): ZIO[Any, SQLException, List[Person]] = qrun(peopleByName(lift(name))) - def getPeopleByName(name: String): ZIO[Any, SQLException, List[Person]] = dataService.getPeopleByName(name) - def getAllPeople(): ZIO[Any, SQLException, List[Person]] = dataService.getAllPeople() + def getPeopleByName2(name: String): ZIO[Any, SQLException, List[Person]] = qrun(peopleByName(lift(name))) + def getPeopleByName(name: String): ZIO[Any, SQLException, List[Person]] = dataService.getPeopleByName(name) + def getAllPeople(): ZIO[Any, SQLException, List[Person]] = dataService.getAllPeople() } val dataServiceLive = ZLayer.fromFunction(DataService.apply _) val applicationLive = ZLayer.fromFunction(ApplicationLive.apply _) object Application { - def getJoes() = ZIO.serviceWithZIO[ApplicationLive](_.getJoes) - def getPeopleByName3(name: String) = ZIO.serviceWithZIO[ApplicationLive](_.getPeopleByName3(name)) - def getPeopleByName2(name: String) = ZIO.serviceWithZIO[ApplicationLive](_.getPeopleByName2(name)) + def getJoes() = ZIO.serviceWithZIO[ApplicationLive](_.getJoes) + def getPeopleByName3(name: String) = ZIO.serviceWithZIO[ApplicationLive](_.getPeopleByName3(name)) + def getPeopleByName2(name: String) = ZIO.serviceWithZIO[ApplicationLive](_.getPeopleByName2(name)) def getPeopleByName2A(name: String) = ZIO.serviceWithZIO[ApplicationLive](_.getPeopleByName2A(name)) - def getPeopleByName(name: String) = ZIO.serviceWithZIO[ApplicationLive](_.getPeopleByName(name)) - def getAllPeople() = 
ZIO.serviceWithZIO[ApplicationLive](_.getAllPeople()) + def getPeopleByName(name: String) = ZIO.serviceWithZIO[ApplicationLive](_.getPeopleByName(name)) + def getAllPeople() = ZIO.serviceWithZIO[ApplicationLive](_.getAllPeople()) } "All Composition variations must work" in { val dataSourceLive = ZLayer.succeed(io.getquill.postgres.pool) - val postgresLive = ZLayer.fromFunction(Quill.Postgres(Literal, _: DataSource)) - val combinedLayer = dataSourceLive >>> postgresLive >>> dataServiceLive >>> applicationLive + val postgresLive = ZLayer.fromFunction(Quill.Postgres(Literal, _: DataSource)) + val combinedLayer = dataSourceLive >>> postgresLive >>> dataServiceLive >>> applicationLive val (a, b, c, d, e) = Unsafe.unsafe { implicit unsafe => - zio.Runtime.default.unsafe.run( - (for { - a <- Application.getJoes() - b <- Application.getPeopleByName("Joe") - c <- Application.getPeopleByName2("Joe") - c1 <- Application.getPeopleByName2A("Joe") - d <- Application.getPeopleByName3("Joe") - e <- Application.getAllPeople() - } yield (a, b, c, d, e)).provideLayer(combinedLayer) - ).getOrThrow() + zio.Runtime.default.unsafe + .run( + (for { + a <- Application.getJoes() + b <- Application.getPeopleByName("Joe") + c <- Application.getPeopleByName2("Joe") + c1 <- Application.getPeopleByName2A("Joe") + d <- Application.getPeopleByName3("Joe") + e <- Application.getAllPeople() + } yield (a, b, c, d, e)).provideLayer(combinedLayer) + ) + .getOrThrow() } val joes = entries.filter(_.name == "Joe") diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/postgres/PeopleZioJdbcSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/postgres/PeopleZioJdbcSpec.scala index 9b2290d74..0f6a3db7b 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/postgres/PeopleZioJdbcSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/postgres/PeopleZioJdbcSpec.scala @@ -27,7 +27,9 @@ class PeopleZioJdbcSpec extends PeopleZioSpec { } "Example 2 - range simple" in { - testContext.run(`Ex 2 rangeSimple`(lift(`Ex 2 param 1`), lift(`Ex 2 param 2`))).runSyncUnsafe() should contain theSameElementsAs `Ex 2 expected result` + testContext + .run(`Ex 2 rangeSimple`(lift(`Ex 2 param 1`), lift(`Ex 2 param 2`))) + .runSyncUnsafe() should contain theSameElementsAs `Ex 2 expected result` } "Example 3 - satisfies" in { @@ -39,7 +41,9 @@ class PeopleZioJdbcSpec extends PeopleZioSpec { } "Example 5 - compose" in { - testContext.run(`Ex 5 compose`(lift(`Ex 5 param 1`), lift(`Ex 5 param 2`))).runSyncUnsafe() mustEqual `Ex 5 expected result` + testContext + .run(`Ex 5 compose`(lift(`Ex 5 param 1`), lift(`Ex 5 param 2`))) + .runSyncUnsafe() mustEqual `Ex 5 expected result` } "Example 6 - predicate 0" in { diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/postgres/PeopleZioReturningSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/postgres/PeopleZioReturningSpec.scala index 7cf2f00d5..345f367a7 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/postgres/PeopleZioReturningSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/postgres/PeopleZioReturningSpec.scala @@ -20,7 +20,7 @@ class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { "Ex 0 insert.returning(_.generatedColumn) mod" in { import `Ex 0 insert.returning(_.generatedColumn) mod`._ (for { - id <- testContext.run(op) + id <- testContext.run(op) output <- testContext.run(get) } yield (output.toSet mustEqual result(id).toSet)).runSyncUnsafe() } @@ -29,14 +29,14 @@ class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { import `Ex 0.5 
insert.returning(wholeRecord) mod`._ (for { product <- testContext.run(op) - output <- testContext.run(get) + output <- testContext.run(get) } yield (output mustEqual result(product))).runSyncUnsafe() } "Ex 1 insert.returningMany(_.generatedColumn) mod" in { import `Ex 1 insert.returningMany(_.generatedColumn) mod`._ (for { - id <- testContext.run(op) + id <- testContext.run(op) output <- testContext.run(get) } yield (output mustEqual result(id.head))).runSyncUnsafe() } @@ -45,8 +45,8 @@ class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { import `Ex 2 update.returningMany(_.singleColumn) mod`._ (for { opResult <- testContext.run(op) - _ = opResult.toSet mustEqual expect.toSet - output <- testContext.run(get) + _ = opResult.toSet mustEqual expect.toSet + output <- testContext.run(get) } yield (output.toSet mustEqual result.toSet)).runSyncUnsafe() } @@ -54,8 +54,8 @@ class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { import `Ex 3 delete.returningMany(wholeRecord)`._ (for { opResult <- testContext.run(op) - _ = opResult.toSet mustEqual expect.toSet - output <- testContext.run(get) + _ = opResult.toSet mustEqual expect.toSet + output <- testContext.run(get) } yield (output.toSet mustEqual result.toSet)).runSyncUnsafe() } @@ -63,8 +63,8 @@ class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { import `Ex 4 update.returningMany(query)`._ (for { opResult <- testContext.run(op) - _ = opResult.toSet mustEqual expect.toSet - output <- testContext.run(get) + _ = opResult.toSet mustEqual expect.toSet + output <- testContext.run(get) } yield (output.toSet mustEqual result.toSet)).runSyncUnsafe() } } diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/postgres/PostgresJsonSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/postgres/PostgresJsonSpec.scala index b97340919..7d743cf87 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/postgres/PostgresJsonSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/postgres/PostgresJsonSpec.scala @@ -3,7 +3,7 @@ package io.getquill.postgres import io.getquill._ import zio.Chunk import zio.json.ast.Json -import zio.json.{ DeriveJsonDecoder, DeriveJsonEncoder, JsonDecoder, JsonEncoder } // +import zio.json.{DeriveJsonDecoder, DeriveJsonEncoder, JsonDecoder, JsonEncoder} // class PostgresJsonSpec extends ZioSpec { val context = testContext @@ -15,9 +15,9 @@ class PostgresJsonSpec extends ZioSpec { case class JsonEntity(name: String, value: JsonValue[PersonJson]) case class JsonbEntity(name: String, value: JsonbValue[PersonJsonb]) - val jsonJoe = JsonValue(PersonJson("Joe", 123)) - val jsonValue = JsonEntity("JoeEntity", jsonJoe) - val jsonbJoe = JsonbValue(PersonJsonb("Joe", 123)) + val jsonJoe = JsonValue(PersonJson("Joe", 123)) + val jsonValue = JsonEntity("JoeEntity", jsonJoe) + val jsonbJoe = JsonbValue(PersonJsonb("Joe", 123)) val jsonbValue = JsonbEntity("JoeEntity", jsonbJoe) case class JsonAstEntity(name: String, value: JsonValue[Json]) @@ -51,11 +51,11 @@ class PostgresJsonSpec extends ZioSpec { } "encodes and decodes json ast" - { - val jsonJoe = Json.Obj(Chunk("age" -> Json.Num(123), "name" -> Json.Str("Joe"))) - inline def jsonAstQuery = quote { querySchema[JsonAstEntity]("JsonEntity") } - inline def jsonbAstQuery = quote { querySchema[JsonbAstEntity]("JsonbEntity") } + val jsonJoe = Json.Obj(Chunk("age" -> Json.Num(123), "name" -> Json.Str("Joe"))) + inline def jsonAstQuery = quote(querySchema[JsonAstEntity]("JsonEntity")) + inline def jsonbAstQuery = 
quote(querySchema[JsonbAstEntity]("JsonbEntity")) - val jsonAstValue = JsonAstEntity("JoeEntity", JsonValue(jsonJoe)) + val jsonAstValue = JsonAstEntity("JoeEntity", JsonValue(jsonJoe)) val jsonbAstValue = JsonbAstEntity("JoeEntity", JsonbValue(jsonJoe)) "json" in { diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/postgres/ProductJdbcSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/postgres/ProductJdbcSpec.scala index 75be37091..2d5863298 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/postgres/ProductJdbcSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/postgres/ProductJdbcSpec.scala @@ -20,8 +20,8 @@ class ProductJdbcSpec extends ProductSpec with ZioSpec { val (inserted, product) = (for { i <- testContext.run { - liftQuery(productEntries).foreach(e => productInsert(e)) - } + liftQuery(productEntries).foreach(e => productInsert(e)) + } ps <- testContext.run(productById(lift(i(2)))) } yield (i, ps.head)).runSyncUnsafe() @@ -32,7 +32,7 @@ class ProductJdbcSpec extends ProductSpec with ZioSpec { "Single insert product" in { val (inserted, product) = (for { - i <- testContext.run(productSingleInsert) + i <- testContext.run(productSingleInsert) ps <- testContext.run(productById(lift(i))) } yield (i, ps.head)).runSyncUnsafe() product.description mustEqual "Window" @@ -44,8 +44,8 @@ class ProductJdbcSpec extends ProductSpec with ZioSpec { val (inserted, returnedProduct) = (for { i <- testContext.run { - product.insert(_.sku -> lift(prd.sku), _.description -> lift(prd.description)).returning(_.id) - } + product.insert(_.sku -> lift(prd.sku), _.description -> lift(prd.description)).returning(_.id) + } rps <- testContext.run(productById(lift(i))) } yield (i, rps.head)).runSyncUnsafe() @@ -61,7 +61,7 @@ class ProductJdbcSpec extends ProductSpec with ZioSpec { } val (inserted, returnedProduct) = (for { - i <- testContext.run(q1) + i <- testContext.run(q1) rps <- testContext.run(productById(lift(i))) } yield (i, rps.head)).runSyncUnsafe() @@ -74,7 +74,7 @@ class ProductJdbcSpec extends ProductSpec with ZioSpec { val prd = Product(0L, "test3", 3L) val (inserted, returnedProduct) = (for { - i <- testContext.run(productInsert(lift(prd))) + i <- testContext.run(productInsert(lift(prd))) rps <- testContext.run(productById(lift(i))) } yield (i, rps.head)).runSyncUnsafe() diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/postgres/ZioJdbcContextSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/postgres/ZioJdbcContextSpec.scala index bb2aa3461..c42fa7649 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/postgres/ZioJdbcContextSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/postgres/ZioJdbcContextSpec.scala @@ -1,7 +1,7 @@ package io.getquill.postgres import io.getquill.ZioSpec -import zio.{ Task, ZIO, ZLayer } +import zio.{Task, ZIO, ZLayer} import io.getquill.context.ZioJdbc._ import javax.sql.DataSource @@ -17,7 +17,7 @@ class ZioJdbcContextSpec extends ZioSpec { "success" in { (for { _ <- testContext.run(qr1.delete) - _ <- testContext.transaction { testContext.run(qr1.insert(_.i -> 33)) } + _ <- testContext.transaction(testContext.run(qr1.insert(_.i -> 33))) r <- testContext.run(qr1) } yield r).runSyncUnsafe().map(_.i) mustEqual List(33) } @@ -25,11 +25,11 @@ class ZioJdbcContextSpec extends ZioSpec { (for { _ <- testContext.run(qr1.delete) _ <- testContext.transaction { - for { - env <- ZIO.service[Int] - qry <- testContext.run(qr1.insert(_.i -> lift(env))) - } yield qry - } + for { + env <- ZIO.service[Int] + qry <- testContext.run(qr1.insert(_.i -> 
lift(env))) + } yield qry + } r <- testContext.run(qr1) } yield r).provideSomeLayer(ZLayer.succeed(33)).runSyncUnsafe().map(_.i) mustEqual List(33) } @@ -37,11 +37,11 @@ class ZioJdbcContextSpec extends ZioSpec { (for { _ <- testContext.run(qr1.delete) seq <- testContext.transaction { - for { - _ <- testContext.run(qr1.insert(_.i -> 33)) - s <- accumulate(testContext.stream(qr1)) - } yield s - } + for { + _ <- testContext.run(qr1.insert(_.i -> 33)) + s <- accumulate(testContext.stream(qr1)) + } yield s + } r <- testContext.run(qr1) } yield (seq.map(_.i), r.map(_.i))).runSyncUnsafe() mustEqual ((List(33), List(33))) } @@ -49,32 +49,37 @@ class ZioJdbcContextSpec extends ZioSpec { (for { _ <- testContext.run(qr1.delete) e <- testContext.transaction { - testContext.run(qr1.insert(_.i -> 36)) *> - testContext.transaction { - ZIO.collectAll(Seq( - testContext.run(qr1.insert(_.i -> 18)), - ZIO.attempt { - throw new IllegalStateException - } - )) - } - }.catchSome { - case e: Exception => ZIO.attempt(e.getClass.getSimpleName) - } + testContext.run(qr1.insert(_.i -> 36)) *> + testContext.transaction { + ZIO.collectAll( + Seq( + testContext.run(qr1.insert(_.i -> 18)), + ZIO.attempt { + throw new IllegalStateException + } + ) + ) + } + }.catchSome { case e: Exception => + ZIO.attempt(e.getClass.getSimpleName) + } r <- testContext.run(qr1) } yield (e, r.isEmpty)).runSyncUnsafe() mustEqual (("IllegalStateException", true)) } "nested" in { (for { _ <- testContext.run(qr1.delete) - _ <- testContext.transaction { testContext.transaction { testContext.run(qr1.insert(_.i -> 33)) } } + _ <- testContext.transaction(testContext.transaction(testContext.run(qr1.insert(_.i -> 33)))) r <- testContext.run(qr1) } yield r).runSyncUnsafe().map(_.i) mustEqual List(33) } "prepare" in { - testContext.prepareParams( - "select * from Person where name=? and age > ?", (ps, session) => (List("Sarah", 127), ps) - ).runSyncUnsafe() mustEqual List("127", "'Sarah'") + testContext + .prepareParams( + "select * from Person where name=? 
and age > ?", + (ps, session) => (List("Sarah", 127), ps) + ) + .runSyncUnsafe() mustEqual List("127", "'Sarah'") } } } diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/sqlite/PeopleZioJdbcSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/sqlite/PeopleZioJdbcSpec.scala index b234b7205..890236a80 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/sqlite/PeopleZioJdbcSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/sqlite/PeopleZioJdbcSpec.scala @@ -27,7 +27,9 @@ class PeopleZioJdbcSpec extends PeopleZioSpec { } "Example 2 - range simple" in { - testContext.run(`Ex 2 rangeSimple`(lift(`Ex 2 param 1`), lift(`Ex 2 param 2`))).runSyncUnsafe() should contain theSameElementsAs `Ex 2 expected result` + testContext + .run(`Ex 2 rangeSimple`(lift(`Ex 2 param 1`), lift(`Ex 2 param 2`))) + .runSyncUnsafe() should contain theSameElementsAs `Ex 2 expected result` } "Example 3 - satisfies" in { @@ -39,7 +41,9 @@ class PeopleZioJdbcSpec extends PeopleZioSpec { } "Example 5 - compose" in { - testContext.run(`Ex 5 compose`(lift(`Ex 5 param 1`), lift(`Ex 5 param 2`))).runSyncUnsafe() mustEqual `Ex 5 expected result` + testContext + .run(`Ex 5 compose`(lift(`Ex 5 param 1`), lift(`Ex 5 param 2`))) + .runSyncUnsafe() mustEqual `Ex 5 expected result` } "Example 6 - predicate 0" in { diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/sqlite/PeopleZioReturningSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/sqlite/PeopleZioReturningSpec.scala index 7f6ea7d83..20cf7e4c8 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/sqlite/PeopleZioReturningSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/sqlite/PeopleZioReturningSpec.scala @@ -5,7 +5,6 @@ import io.getquill._ class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { - val context: testContext.type = testContext import testContext._ @@ -21,7 +20,7 @@ class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { "Ex 0 insert.returning(_.generatedColumn) mod" in { import `Ex 0 insert.returning(_.generatedColumn) mod`._ (for { - id <- testContext.run(op) + id <- testContext.run(op) output <- testContext.run(get) } yield (output.toSet mustEqual result(id).toSet)).runSyncUnsafe() } @@ -35,7 +34,7 @@ class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { "Ex 1 insert.returningMany(_.generatedColumn) mod" in { import `Ex 1 insert.returningMany(_.generatedColumn) mod`._ (for { - id <- testContext.run(op) + id <- testContext.run(op) output <- testContext.run(get) } yield (output mustEqual result(id.head))).runSyncUnsafe() } @@ -45,8 +44,8 @@ class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { import `Ex 2 update.returningMany(_.singleColumn) mod`._ (for { opResult <- testContext.run(op) - _ = opResult.toSet mustEqual expect.toSet - output <- testContext.run(get) + _ = opResult.toSet mustEqual expect.toSet + output <- testContext.run(get) } yield (output.toSet mustEqual result.toSet)).runSyncUnsafe() } @@ -61,8 +60,8 @@ class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { import `Ex 4 update.returningMany(query)`._ (for { opResult <- testContext.run(op) - _ = opResult.toSet mustEqual expect.toSet - output <- testContext.run(get) + _ = opResult.toSet mustEqual expect.toSet + output <- testContext.run(get) } yield (output.toSet mustEqual result.toSet)).runSyncUnsafe() } } diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/sqlite/PrepareJdbcSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/sqlite/PrepareJdbcSpec.scala index 
a4ad02250..bfd8f0db3 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/sqlite/PrepareJdbcSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/sqlite/PrepareJdbcSpec.scala @@ -2,7 +2,7 @@ package io.getquill.sqlite import io.getquill.context.qzio.ImplicitSyntax.Implicit import javax.sql.DataSource -import java.sql.{ Connection, ResultSet } +import java.sql.{Connection, ResultSet} import io.getquill.PrepareZioJdbcSpecBase import org.scalatest.BeforeAndAfter @@ -10,7 +10,7 @@ import io.getquill._ class PrepareJdbcSpec extends PrepareZioJdbcSpecBase with BeforeAndAfter { - implicit val ds: Implicit[DataSource] = Implicit(pool) + implicit val ds: Implicit[DataSource] = Implicit(pool) val context: testContext.underlying.type = testContext.underlying import testContext.underlying._ diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/sqlite/ProductJdbcSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/sqlite/ProductJdbcSpec.scala index c1b0d7a75..771095a47 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/sqlite/ProductJdbcSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/sqlite/ProductJdbcSpec.scala @@ -21,7 +21,7 @@ class ProductJdbcSpec extends ProductSpec with ZioSpec { "Insert multiple products" in { val (inserted, product) = (for { - i <- ZIO.collectAll(productEntries.map(product => testContext.run(productInsert(lift(product))))) + i <- ZIO.collectAll(productEntries.map(product => testContext.run(productInsert(lift(product))))) ps <- testContext.run(productById(lift(i(2)))) } yield (i, ps.head)).runSyncUnsafe() @@ -32,7 +32,7 @@ class ProductJdbcSpec extends ProductSpec with ZioSpec { "Single insert product" in { val (inserted, product) = (for { - i <- testContext.run(productSingleInsert) + i <- testContext.run(productSingleInsert) ps <- testContext.run(productById(lift(i))) } yield (i, ps.head)).runSyncUnsafe() product.description mustEqual "Window" @@ -44,8 +44,8 @@ class ProductJdbcSpec extends ProductSpec with ZioSpec { val (inserted, returnedProduct) = (for { i <- testContext.run { - product.insert(_.sku -> lift(prd.sku), _.description -> lift(prd.description)).returningGenerated(_.id) - } + product.insert(_.sku -> lift(prd.sku), _.description -> lift(prd.description)).returningGenerated(_.id) + } rps <- testContext.run(productById(lift(i))) } yield (i, rps.head)).runSyncUnsafe() @@ -61,7 +61,7 @@ class ProductJdbcSpec extends ProductSpec with ZioSpec { } val (inserted, returnedProduct) = (for { - i <- testContext.run(q1) + i <- testContext.run(q1) rps <- testContext.run(productById(lift(i))) } yield (i, rps.head)).runSyncUnsafe() @@ -74,7 +74,7 @@ class ProductJdbcSpec extends ProductSpec with ZioSpec { val prd = Product(0L, "test3", 3L) val (inserted, returnedProduct) = (for { - i <- testContext.run(productInsert(lift(prd))) + i <- testContext.run(productInsert(lift(prd))) rps <- testContext.run(productById(lift(i))) } yield (i, rps.head)).runSyncUnsafe() diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/sqlite/ZioJdbcContextSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/sqlite/ZioJdbcContextSpec.scala index 4bb3e01ab..9b6623bfb 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/sqlite/ZioJdbcContextSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/sqlite/ZioJdbcContextSpec.scala @@ -1,7 +1,7 @@ package io.getquill.sqlite import io.getquill.ZioSpec -import zio.{ Task, ZIO, ZLayer } +import zio.{Task, ZIO, ZLayer} import io.getquill.context.ZioJdbc._ import javax.sql.DataSource import io.getquill._ @@ -15,7 +15,7 @@ class 
ZioJdbcContextSpec extends ZioSpec { "success" in { (for { _ <- testContext.run(qr1.delete) - _ <- testContext.transaction { testContext.run(qr1.insert(_.i -> 33)) } + _ <- testContext.transaction(testContext.run(qr1.insert(_.i -> 33))) r <- testContext.run(qr1) } yield r).runSyncUnsafe().map(_.i) mustEqual List(33) } @@ -23,11 +23,11 @@ class ZioJdbcContextSpec extends ZioSpec { (for { _ <- testContext.run(qr1.delete) _ <- testContext.transaction { - for { - env <- ZIO.service[Int] - qry <- testContext.run(qr1.insert(_.i -> lift(env))) - } yield qry - } + for { + env <- ZIO.service[Int] + qry <- testContext.run(qr1.insert(_.i -> lift(env))) + } yield qry + } r <- testContext.run(qr1) } yield r).provideSomeLayer(ZLayer.succeed(33)).runSyncUnsafe().map(_.i) mustEqual List(33) } @@ -35,11 +35,11 @@ class ZioJdbcContextSpec extends ZioSpec { (for { _ <- testContext.run(qr1.delete) seq <- testContext.transaction { - for { - _ <- testContext.run(qr1.insert(_.i -> 33)) - s <- accumulate(testContext.stream(qr1)) - } yield s - } + for { + _ <- testContext.run(qr1.insert(_.i -> 33)) + s <- accumulate(testContext.stream(qr1)) + } yield s + } r <- testContext.run(qr1) } yield (seq.map(_.i), r.map(_.i))).runSyncUnsafe() mustEqual ((List(33), List(33))) } @@ -47,32 +47,37 @@ class ZioJdbcContextSpec extends ZioSpec { (for { _ <- testContext.run(qr1.delete) e <- testContext.transaction { - testContext.run(qr1.insert(_.i -> 36)) *> - testContext.transaction { - ZIO.collectAll(Seq( - testContext.run(qr1.insert(_.i -> 18)), - ZIO.attempt { - throw new IllegalStateException - } - )) - } - }.catchSome { - case e: Exception => ZIO.attempt(e.getClass.getSimpleName) - } + testContext.run(qr1.insert(_.i -> 36)) *> + testContext.transaction { + ZIO.collectAll( + Seq( + testContext.run(qr1.insert(_.i -> 18)), + ZIO.attempt { + throw new IllegalStateException + } + ) + ) + } + }.catchSome { case e: Exception => + ZIO.attempt(e.getClass.getSimpleName) + } r <- testContext.run(qr1) } yield (e, r.isEmpty)).runSyncUnsafe() mustEqual (("IllegalStateException", true)) } "nested" in { (for { _ <- testContext.run(qr1.delete) - _ <- testContext.transaction { testContext.transaction { testContext.run(qr1.insert(_.i -> 33)) } } + _ <- testContext.transaction(testContext.transaction(testContext.run(qr1.insert(_.i -> 33)))) r <- testContext.run(qr1) } yield r).runSyncUnsafe().map(_.i) mustEqual List(33) } "prepare" in { - testContext.prepareParams( - "select * from Person where name=? and age > ?", (ps, session) => (List("Sarah", 127), ps) - ).runSyncUnsafe() mustEqual List("127", "'Sarah'") + testContext + .prepareParams( + "select * from Person where name=? 
and age > ?", + (ps, session) => (List("Sarah", 127), ps) + ) + .runSyncUnsafe() mustEqual List("127", "'Sarah'") } } } diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/sqlserver/PeopleZioJdbcSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/sqlserver/PeopleZioJdbcSpec.scala index 855663b7c..6b7c42aa0 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/sqlserver/PeopleZioJdbcSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/sqlserver/PeopleZioJdbcSpec.scala @@ -27,7 +27,9 @@ class PeopleZioJdbcSpec extends PeopleZioSpec { } "Example 2 - range simple" in { - testContext.run(`Ex 2 rangeSimple`(lift(`Ex 2 param 1`), lift(`Ex 2 param 2`))).runSyncUnsafe() should contain theSameElementsAs `Ex 2 expected result` + testContext + .run(`Ex 2 rangeSimple`(lift(`Ex 2 param 1`), lift(`Ex 2 param 2`))) + .runSyncUnsafe() should contain theSameElementsAs `Ex 2 expected result` } "Example 3 - satisfies" in { @@ -39,7 +41,9 @@ class PeopleZioJdbcSpec extends PeopleZioSpec { } "Example 5 - compose" in { - testContext.run(`Ex 5 compose`(lift(`Ex 5 param 1`), lift(`Ex 5 param 2`))).runSyncUnsafe() mustEqual `Ex 5 expected result` + testContext + .run(`Ex 5 compose`(lift(`Ex 5 param 1`), lift(`Ex 5 param 2`))) + .runSyncUnsafe() mustEqual `Ex 5 expected result` } "Example 6 - predicate 0" in { diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/sqlserver/PeopleZioReturningSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/sqlserver/PeopleZioReturningSpec.scala index 3cfbfe6b4..17acdf435 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/sqlserver/PeopleZioReturningSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/sqlserver/PeopleZioReturningSpec.scala @@ -5,7 +5,6 @@ import io.getquill.context.sql.PeopleReturningSpec class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { - val context: testContext.type = testContext import testContext._ @@ -21,7 +20,7 @@ class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { "Ex 0 insert.returning(_.generatedColumn) mod" in { import `Ex 0 insert.returning(_.generatedColumn) mod`._ (for { - id <- testContext.run(op) + id <- testContext.run(op) output <- testContext.run(get) } yield (output.toSet mustEqual result(id).toSet)).runSyncUnsafe() } @@ -30,14 +29,14 @@ class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { import `Ex 0.5 insert.returning(wholeRecord) mod`._ (for { product <- testContext.run(op) - output <- testContext.run(get) + output <- testContext.run(get) } yield (output mustEqual result(product))).runSyncUnsafe() } "Ex 1 insert.returningMany(_.generatedColumn) mod" in { import `Ex 1 insert.returningMany(_.generatedColumn) mod`._ (for { - id <- testContext.run(op) + id <- testContext.run(op) output <- testContext.run(get) } yield (output mustEqual result(id.head))).runSyncUnsafe() } @@ -46,8 +45,8 @@ class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { import `Ex 2 update.returningMany(_.singleColumn) mod`._ (for { opResult <- testContext.run(op) - _ = opResult.toSet mustEqual expect.toSet - output <- testContext.run(get) + _ = opResult.toSet mustEqual expect.toSet + output <- testContext.run(get) } yield (output.toSet mustEqual result.toSet)).runSyncUnsafe() } @@ -55,8 +54,8 @@ class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { import `Ex 3 delete.returningMany(wholeRecord)`._ (for { opResult <- testContext.run(op) - _ = opResult.toSet mustEqual expect.toSet - output <- testContext.run(get) + _ = opResult.toSet mustEqual 
expect.toSet + output <- testContext.run(get) } yield (output.toSet mustEqual result.toSet)).runSyncUnsafe() } @@ -65,8 +64,8 @@ class PeopleZioReturningSpec extends PeopleReturningSpec with ZioSpec { import `Ex 4 update.returningMany(query)`._ (for { opResult <- testContext.run(op) - _ = opResult.toSet mustEqual expect.toSet - output <- testContext.run(get) + _ = opResult.toSet mustEqual expect.toSet + output <- testContext.run(get) } yield (output.toSet mustEqual result.toSet)).runSyncUnsafe() } } diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/sqlserver/PrepareJdbcSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/sqlserver/PrepareJdbcSpec.scala index ed49ef6eb..f01ca2bdb 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/sqlserver/PrepareJdbcSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/sqlserver/PrepareJdbcSpec.scala @@ -2,7 +2,7 @@ package io.getquill.sqlserver import io.getquill.context.qzio.ImplicitSyntax.Implicit import javax.sql.DataSource -import java.sql.{ Connection, ResultSet } +import java.sql.{Connection, ResultSet} import io.getquill.PrepareZioJdbcSpecBase import org.scalatest.BeforeAndAfter @@ -10,7 +10,7 @@ import io.getquill._ class PrepareJdbcSpec extends PrepareZioJdbcSpecBase with BeforeAndAfter { - implicit val ds: Implicit[DataSource] = Implicit(pool) + implicit val ds: Implicit[DataSource] = Implicit(pool) val context: testContext.underlying.type = testContext.underlying import testContext.underlying._ @@ -20,7 +20,7 @@ class PrepareJdbcSpec extends PrepareZioJdbcSpecBase with BeforeAndAfter { val prepareQuery = prepare(query[Product]) // TODO Try removing 'inline' and do the old implicit way and see if an error results - inline given InsertMeta[Product] = insertMeta[Product](_.id) + inline given InsertMeta[Product] = insertMeta[Product](_.id) "single" in { val prepareInsert = prepare(query[Product].insertValue(lift(productEntries.head))) diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/sqlserver/ProductJdbcSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/sqlserver/ProductJdbcSpec.scala index 319307a09..ca04ad0f0 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/sqlserver/ProductJdbcSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/sqlserver/ProductJdbcSpec.scala @@ -21,7 +21,7 @@ class ProductJdbcSpec extends ProductSpec with ZioSpec { "Insert multiple products" in { val (inserted, product) = (for { - i <- ZIO.collectAll(productEntries.map(product => testContext.run(productInsert(lift(product))))) + i <- ZIO.collectAll(productEntries.map(product => testContext.run(productInsert(lift(product))))) ps <- testContext.run(productById(lift(i(2)))) } yield (i, ps.head)).runSyncUnsafe() @@ -32,7 +32,7 @@ class ProductJdbcSpec extends ProductSpec with ZioSpec { "Single insert product" in { val (inserted, product) = (for { - i <- testContext.run(productSingleInsert) + i <- testContext.run(productSingleInsert) ps <- testContext.run(productById(lift(i))) } yield (i, ps.head)).runSyncUnsafe() product.description mustEqual "Window" @@ -44,8 +44,8 @@ class ProductJdbcSpec extends ProductSpec with ZioSpec { val (inserted, returnedProduct) = (for { i <- testContext.run { - product.insert(_.sku -> lift(prd.sku), _.description -> lift(prd.description)).returningGenerated(_.id) - } + product.insert(_.sku -> lift(prd.sku), _.description -> lift(prd.description)).returningGenerated(_.id) + } rps <- testContext.run(productById(lift(i))) } yield (i, rps.head)).runSyncUnsafe() @@ -61,7 +61,7 @@ class ProductJdbcSpec extends 
ProductSpec with ZioSpec { } val (inserted, returnedProduct) = (for { - i <- testContext.run(q1) + i <- testContext.run(q1) rps <- testContext.run(productById(lift(i))) } yield (i, rps.head)).runSyncUnsafe() @@ -74,7 +74,7 @@ class ProductJdbcSpec extends ProductSpec with ZioSpec { val prd = Product(0L, "test3", 3L) val (inserted, returnedProduct) = (for { - i <- testContext.run(productInsert(lift(prd))) + i <- testContext.run(productInsert(lift(prd))) rps <- testContext.run(productById(lift(i))) } yield (i, rps.head)).runSyncUnsafe() diff --git a/quill-jdbc-zio/src/test/scala/io/getquill/sqlserver/ZioJdbcContextSpec.scala b/quill-jdbc-zio/src/test/scala/io/getquill/sqlserver/ZioJdbcContextSpec.scala index 5f91f0572..fc6c07426 100644 --- a/quill-jdbc-zio/src/test/scala/io/getquill/sqlserver/ZioJdbcContextSpec.scala +++ b/quill-jdbc-zio/src/test/scala/io/getquill/sqlserver/ZioJdbcContextSpec.scala @@ -1,7 +1,7 @@ package io.getquill.sqlserver import io.getquill.ZioSpec -import zio.{ Task, ZIO, ZLayer } +import zio.{Task, ZIO, ZLayer} import io.getquill.context.ZioJdbc._ import javax.sql.DataSource @@ -16,7 +16,7 @@ class ZioJdbcContextSpec extends ZioSpec { "success" in { (for { _ <- testContext.run(qr1.delete) - _ <- testContext.transaction { testContext.run(qr1.insert(_.i -> 33)) } + _ <- testContext.transaction(testContext.run(qr1.insert(_.i -> 33))) r <- testContext.run(qr1) } yield r).runSyncUnsafe().map(_.i) mustEqual List(33) } @@ -24,11 +24,11 @@ class ZioJdbcContextSpec extends ZioSpec { (for { _ <- testContext.run(qr1.delete) _ <- testContext.transaction { - for { - env <- ZIO.service[Int] - qry <- testContext.run(qr1.insert(_.i -> lift(env))) - } yield qry - } + for { + env <- ZIO.service[Int] + qry <- testContext.run(qr1.insert(_.i -> lift(env))) + } yield qry + } r <- testContext.run(qr1) } yield r).provideSomeLayer(ZLayer.succeed(33)).runSyncUnsafe().map(_.i) mustEqual List(33) } @@ -36,11 +36,11 @@ class ZioJdbcContextSpec extends ZioSpec { (for { _ <- testContext.run(qr1.delete) seq <- testContext.transaction { - for { - _ <- testContext.run(qr1.insert(_.i -> 33)) - s <- accumulate(testContext.stream(qr1)) - } yield s - } + for { + _ <- testContext.run(qr1.insert(_.i -> 33)) + s <- accumulate(testContext.stream(qr1)) + } yield s + } r <- testContext.run(qr1) } yield (seq.map(_.i), r.map(_.i))).runSyncUnsafe() mustEqual ((List(33), List(33))) } @@ -48,32 +48,37 @@ class ZioJdbcContextSpec extends ZioSpec { (for { _ <- testContext.run(qr1.delete) e <- testContext.transaction { - testContext.run(qr1.insert(_.i -> 36)) *> - testContext.transaction { - ZIO.collectAll(Seq( - testContext.run(qr1.insert(_.i -> 18)), - ZIO.attempt { - throw new IllegalStateException - } - )) - } - }.catchSome { - case e: Exception => ZIO.attempt(e.getClass.getSimpleName) - } + testContext.run(qr1.insert(_.i -> 36)) *> + testContext.transaction { + ZIO.collectAll( + Seq( + testContext.run(qr1.insert(_.i -> 18)), + ZIO.attempt { + throw new IllegalStateException + } + ) + ) + } + }.catchSome { case e: Exception => + ZIO.attempt(e.getClass.getSimpleName) + } r <- testContext.run(qr1) } yield (e, r.isEmpty)).runSyncUnsafe() mustEqual (("IllegalStateException", true)) } "nested" in { (for { _ <- testContext.run(qr1.delete) - _ <- testContext.transaction { testContext.transaction { testContext.run(qr1.insert(_.i -> 33)) } } + _ <- testContext.transaction(testContext.transaction(testContext.run(qr1.insert(_.i -> 33)))) r <- testContext.run(qr1) } yield r).runSyncUnsafe().map(_.i) mustEqual List(33) } 
"prepare" in { - testContext.prepareParams( - "select * from Person where name=? and age > ?", (ps, session) => (List("Sarah", 127), ps) - ).runSyncUnsafe() mustEqual List("127", "'Sarah'") + testContext + .prepareParams( + "select * from Person where name=? and age > ?", + (ps, session) => (List("Sarah", 127), ps) + ) + .runSyncUnsafe() mustEqual List("127", "'Sarah'") } } } diff --git a/quill-jdbc/src/main/scala/io/getquill/H2JdbcContext.scala b/quill-jdbc/src/main/scala/io/getquill/H2JdbcContext.scala index 3707d6f0f..793fd7428 100644 --- a/quill-jdbc/src/main/scala/io/getquill/H2JdbcContext.scala +++ b/quill-jdbc/src/main/scala/io/getquill/H2JdbcContext.scala @@ -4,12 +4,12 @@ import java.io.Closeable import javax.sql.DataSource import com.typesafe.config.Config -import io.getquill.context.jdbc.{ JdbcContext, H2JdbcContextBase } +import io.getquill.context.jdbc.{JdbcContext, H2JdbcContextBase} import io.getquill.util.LoadConfig class H2JdbcContext[+N <: NamingStrategy](val naming: N, val dataSource: DataSource) - extends JdbcContext[H2Dialect, N] - with H2JdbcContextBase[H2Dialect, N] { + extends JdbcContext[H2Dialect, N] + with H2JdbcContextBase[H2Dialect, N] { override val idiom: H2Dialect = H2Dialect def this(naming: N, config: JdbcContextConfig) = this(naming, config.dataSource) def this(naming: N, config: Config) = this(naming, JdbcContextConfig(config)) diff --git a/quill-jdbc/src/main/scala/io/getquill/MysqlJdbcContext.scala b/quill-jdbc/src/main/scala/io/getquill/MysqlJdbcContext.scala index 048e1d62e..13e8f6138 100644 --- a/quill-jdbc/src/main/scala/io/getquill/MysqlJdbcContext.scala +++ b/quill-jdbc/src/main/scala/io/getquill/MysqlJdbcContext.scala @@ -4,12 +4,12 @@ import java.io.Closeable import javax.sql.DataSource import com.typesafe.config.Config -import io.getquill.context.jdbc.{ JdbcContext, MysqlJdbcContextBase } +import io.getquill.context.jdbc.{JdbcContext, MysqlJdbcContextBase} import io.getquill.util.LoadConfig class MysqlJdbcContext[+N <: NamingStrategy](val naming: N, val dataSource: DataSource) - extends JdbcContext[MySQLDialect, N] - with MysqlJdbcContextBase[MySQLDialect, N] { + extends JdbcContext[MySQLDialect, N] + with MysqlJdbcContextBase[MySQLDialect, N] { override val idiom: MySQLDialect = MySQLDialect def this(naming: N, config: JdbcContextConfig) = this(naming, config.dataSource) def this(naming: N, config: Config) = this(naming, JdbcContextConfig(config)) diff --git a/quill-jdbc/src/main/scala/io/getquill/OracleJdbcContext.scala b/quill-jdbc/src/main/scala/io/getquill/OracleJdbcContext.scala index 3084820cd..50cd969c3 100644 --- a/quill-jdbc/src/main/scala/io/getquill/OracleJdbcContext.scala +++ b/quill-jdbc/src/main/scala/io/getquill/OracleJdbcContext.scala @@ -3,13 +3,13 @@ package io.getquill import java.io.Closeable import com.typesafe.config.Config -import io.getquill.context.jdbc.{ JdbcContext, OracleJdbcContextBase } +import io.getquill.context.jdbc.{JdbcContext, OracleJdbcContextBase} import io.getquill.util.LoadConfig import javax.sql.DataSource class OracleJdbcContext[+N <: NamingStrategy](val naming: N, val dataSource: DataSource) - extends JdbcContext[OracleDialect, N] - with OracleJdbcContextBase[OracleDialect, N] { + extends JdbcContext[OracleDialect, N] + with OracleJdbcContextBase[OracleDialect, N] { override val idiom: OracleDialect = OracleDialect def this(naming: N, config: JdbcContextConfig) = this(naming, config.dataSource) def this(naming: N, config: Config) = this(naming, JdbcContextConfig(config)) diff --git 
a/quill-jdbc/src/main/scala/io/getquill/PostgresJdbcContext.scala b/quill-jdbc/src/main/scala/io/getquill/PostgresJdbcContext.scala index 8309332e9..7fe76e489 100644 --- a/quill-jdbc/src/main/scala/io/getquill/PostgresJdbcContext.scala +++ b/quill-jdbc/src/main/scala/io/getquill/PostgresJdbcContext.scala @@ -4,12 +4,12 @@ import java.io.Closeable import javax.sql.DataSource import com.typesafe.config.Config -import io.getquill.context.jdbc.{ JdbcContext, PostgresJdbcContextBase } +import io.getquill.context.jdbc.{JdbcContext, PostgresJdbcContextBase} import io.getquill.util.LoadConfig class PostgresJdbcContext[+N <: NamingStrategy](val naming: N, val dataSource: DataSource) - extends JdbcContext[PostgresDialect, N] - with PostgresJdbcContextBase[PostgresDialect, N] { + extends JdbcContext[PostgresDialect, N] + with PostgresJdbcContextBase[PostgresDialect, N] { override val idiom: PostgresDialect = PostgresDialect def this(naming: N, config: JdbcContextConfig) = this(naming, config.dataSource) def this(naming: N, config: Config) = this(naming, JdbcContextConfig(config)) diff --git a/quill-jdbc/src/main/scala/io/getquill/SqlServerJdbcContext.scala b/quill-jdbc/src/main/scala/io/getquill/SqlServerJdbcContext.scala index 6afb556aa..670f09214 100644 --- a/quill-jdbc/src/main/scala/io/getquill/SqlServerJdbcContext.scala +++ b/quill-jdbc/src/main/scala/io/getquill/SqlServerJdbcContext.scala @@ -4,12 +4,12 @@ import java.io.Closeable import javax.sql.DataSource import com.typesafe.config.Config -import io.getquill.context.jdbc.{ JdbcContext, SqlServerJdbcContextBase } +import io.getquill.context.jdbc.{JdbcContext, SqlServerJdbcContextBase} import io.getquill.util.LoadConfig class SqlServerJdbcContext[+N <: NamingStrategy](val naming: N, val dataSource: DataSource) - extends JdbcContext[SQLServerDialect, N] - with SqlServerJdbcContextBase[SQLServerDialect, N] { + extends JdbcContext[SQLServerDialect, N] + with SqlServerJdbcContextBase[SQLServerDialect, N] { override val idiom: SQLServerDialect = SQLServerDialect def this(naming: N, config: JdbcContextConfig) = this(naming, config.dataSource) def this(naming: N, config: Config) = this(naming, JdbcContextConfig(config)) diff --git a/quill-jdbc/src/main/scala/io/getquill/SqliteJdbcContext.scala b/quill-jdbc/src/main/scala/io/getquill/SqliteJdbcContext.scala index 70342cc63..049935848 100644 --- a/quill-jdbc/src/main/scala/io/getquill/SqliteJdbcContext.scala +++ b/quill-jdbc/src/main/scala/io/getquill/SqliteJdbcContext.scala @@ -4,12 +4,12 @@ import java.io.Closeable import javax.sql.DataSource import com.typesafe.config.Config -import io.getquill.context.jdbc.{ JdbcContext, SqliteJdbcContextBase } +import io.getquill.context.jdbc.{JdbcContext, SqliteJdbcContextBase} import io.getquill.util.LoadConfig class SqliteJdbcContext[+N <: NamingStrategy](val naming: N, val dataSource: DataSource) - extends JdbcContext[SqliteDialect, N] - with SqliteJdbcContextBase[SqliteDialect, N] { + extends JdbcContext[SqliteDialect, N] + with SqliteJdbcContextBase[SqliteDialect, N] { override val idiom: SqliteDialect = SqliteDialect def this(naming: N, config: JdbcContextConfig) = this(naming, config.dataSource) def this(naming: N, config: Config) = this(naming, JdbcContextConfig(config)) diff --git a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/ArrayDecoders.scala b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/ArrayDecoders.scala index 4cdaebe20..e7661833f 100644 --- a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/ArrayDecoders.scala +++ 
b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/ArrayDecoders.scala @@ -4,8 +4,8 @@ import java.sql.Timestamp import java.time.LocalDate import java.util.Date import java.util.UUID -import java.sql.{ Date => SqlDate } -import java.math.{ BigDecimal => JBigDecimal } +import java.sql.{Date => SqlDate} +import java.math.{BigDecimal => JBigDecimal} import io.getquill.generic.ArrayEncoding import io.getquill.util.Messages.fail @@ -17,50 +17,79 @@ import scala.reflect.ClassTag trait ArrayDecoders extends ArrayEncoding { self: JdbcContextTypes[_, _] => - implicit def arrayStringDecoder[Col <: Seq[String]](implicit bf: CBF[String, Col]): Decoder[Col] = arrayRawDecoder[String, Col] - implicit def arrayBigDecimalDecoder[Col <: Seq[BigDecimal]](implicit bf: CBF[BigDecimal, Col]): Decoder[Col] = arrayDecoder[JBigDecimal, BigDecimal, Col](BigDecimal.apply) - implicit def arrayBooleanDecoder[Col <: Seq[Boolean]](implicit bf: CBF[Boolean, Col]): Decoder[Col] = arrayRawDecoder[Boolean, Col] - implicit def arrayByteDecoder[Col <: Seq[Byte]](implicit bf: CBF[Byte, Col]): Decoder[Col] = arrayRawDecoder[Byte, Col] - implicit def arrayShortDecoder[Col <: Seq[Short]](implicit bf: CBF[Short, Col]): Decoder[Col] = arrayRawDecoder[Short, Col] + implicit def arrayStringDecoder[Col <: Seq[String]](implicit bf: CBF[String, Col]): Decoder[Col] = + arrayRawDecoder[String, Col] + implicit def arrayBigDecimalDecoder[Col <: Seq[BigDecimal]](implicit bf: CBF[BigDecimal, Col]): Decoder[Col] = + arrayDecoder[JBigDecimal, BigDecimal, Col](BigDecimal.apply) + implicit def arrayBooleanDecoder[Col <: Seq[Boolean]](implicit bf: CBF[Boolean, Col]): Decoder[Col] = + arrayRawDecoder[Boolean, Col] + implicit def arrayByteDecoder[Col <: Seq[Byte]](implicit bf: CBF[Byte, Col]): Decoder[Col] = + arrayRawDecoder[Byte, Col] + implicit def arrayShortDecoder[Col <: Seq[Short]](implicit bf: CBF[Short, Col]): Decoder[Col] = + arrayRawDecoder[Short, Col] implicit def arrayIntDecoder[Col <: Seq[Int]](implicit bf: CBF[Int, Col]): Decoder[Col] = arrayRawDecoder[Int, Col] - implicit def arrayLongDecoder[Col <: Seq[Long]](implicit bf: CBF[Long, Col]): Decoder[Col] = arrayRawDecoder[Long, Col] - implicit def arrayFloatDecoder[Col <: Seq[Float]](implicit bf: CBF[Float, Col]): Decoder[Col] = arrayRawDecoder[Float, Col] - implicit def arrayDoubleDecoder[Col <: Seq[Double]](implicit bf: CBF[Double, Col]): Decoder[Col] = arrayRawDecoder[Double, Col] - implicit def arrayDateDecoder[Col <: Seq[Date]](implicit bf: CBF[Date, Col]): Decoder[Col] = arrayRawDecoder[Date, Col] - implicit def arrayTimestampDecoder[Col <: Seq[Timestamp]](implicit bf: CBF[Timestamp, Col]): Decoder[Col] = arrayRawDecoder[Timestamp, Col] - implicit def arrayLocalDateDecoder[Col <: Seq[LocalDate]](implicit bf: CBF[LocalDate, Col]): Decoder[Col] = arrayDecoder[SqlDate, LocalDate, Col](_.toLocalDate) - implicit def arrayUuidDecoder[Col <: Seq[UUID]](implicit bf: Factory[UUID, Col]): Decoder[Col] = arrayRawDecoder[UUID, Col] + implicit def arrayLongDecoder[Col <: Seq[Long]](implicit bf: CBF[Long, Col]): Decoder[Col] = + arrayRawDecoder[Long, Col] + implicit def arrayFloatDecoder[Col <: Seq[Float]](implicit bf: CBF[Float, Col]): Decoder[Col] = + arrayRawDecoder[Float, Col] + implicit def arrayDoubleDecoder[Col <: Seq[Double]](implicit bf: CBF[Double, Col]): Decoder[Col] = + arrayRawDecoder[Double, Col] + implicit def arrayDateDecoder[Col <: Seq[Date]](implicit bf: CBF[Date, Col]): Decoder[Col] = + arrayRawDecoder[Date, Col] + implicit def arrayTimestampDecoder[Col <: 
Seq[Timestamp]](implicit bf: CBF[Timestamp, Col]): Decoder[Col] = + arrayRawDecoder[Timestamp, Col] + implicit def arrayLocalDateDecoder[Col <: Seq[LocalDate]](implicit bf: CBF[LocalDate, Col]): Decoder[Col] = + arrayDecoder[SqlDate, LocalDate, Col](_.toLocalDate) + implicit def arrayUuidDecoder[Col <: Seq[UUID]](implicit bf: Factory[UUID, Col]): Decoder[Col] = + arrayRawDecoder[UUID, Col] /** * Generic encoder for JDBC arrays. * - * @param mapper retrieved raw types fro JDBC array may be mapped via this mapper to satisfy encoder type - * @param bf builder factory is needed to create instances of decoder's collection - * @tparam I raw type retrieved form JDBC array - * @tparam O mapped type fulfilled in decoder's collection - * @tparam Col seq type - * @return JDBC array decoder + * @param mapper + * raw types retrieved from the JDBC array may be mapped via this mapper to + * satisfy the decoder type + * @param bf + * builder factory is needed to create instances of decoder's collection + * @tparam I + * raw type retrieved from the JDBC array + * @tparam O + * mapped type fulfilled in decoder's collection + * @tparam Col + * seq type + * @return + * JDBC array decoder */ - def arrayDecoder[I, O, Col <: Seq[O]](mapper: I => O)(implicit bf: CBF[O, Col], tag: ClassTag[I]): Decoder[Col] = { - decoder[Col]((idx: Index, row: ResultRow, session: Session) => { + def arrayDecoder[I, O, Col <: Seq[O]](mapper: I => O)(implicit bf: CBF[O, Col], tag: ClassTag[I]): Decoder[Col] = + decoder[Col] { (idx: Index, row: ResultRow, session: Session) => val arr = row.getArray(idx) if (arr == null) bf.newBuilder.result() - else arr.getArray.asInstanceOf[Array[AnyRef]].foldLeft(bf.newBuilder) { - case (b, x: I) => b += mapper(x) - case (b, x: java.lang.Number) => b += mapper(x.asInstanceOf[I]) - case (_, x) => - fail(s"Retrieved ${x.getClass.getCanonicalName} type from JDBC array, but expected $tag. Re-check your decoder implementation") - }.result() - }) - } + else + arr.getArray + .asInstanceOf[Array[AnyRef]] + .foldLeft(bf.newBuilder) { + case (b, x: I) => b += mapper(x) + case (b, x: java.lang.Number) => b += mapper(x.asInstanceOf[I]) + case (_, x) => + fail( + s"Retrieved ${x.getClass.getCanonicalName} type from JDBC array, but expected $tag. Re-check your decoder implementation" + ) + } + .result() + } /** - * Creates JDBC array decoder for type `T` which is already supported by database as array element. + * Creates JDBC array decoder for type `T` which is already supported by + * database as array element. 
* - * @param bf builder factory is needed to create instances of decoder's collection - * @tparam T element type - * @tparam Col seq type - * @return JDBC array decoder + * @param bf + * builder factory is needed to create instances of decoder's collection + * @tparam T + * element type + * @tparam Col + * seq type + * @return + * JDBC array decoder */ def arrayRawDecoder[T: ClassTag, Col <: Seq[T]](implicit bf: CBF[T, Col]): Decoder[Col] = arrayDecoder[T, T, Col](identity) diff --git a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/ArrayEncoders.scala b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/ArrayEncoders.scala index 63b14c69b..8eabda4c3 100644 --- a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/ArrayEncoders.scala +++ b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/ArrayEncoders.scala @@ -1,6 +1,6 @@ package io.getquill.context.jdbc -import java.sql.{ Timestamp, Date => SqlDate } +import java.sql.{Timestamp, Date => SqlDate} import java.sql.Types._ import java.time.LocalDate import java.util.Date @@ -13,60 +13,80 @@ trait ArrayEncoders extends ArrayEncoding { self: JdbcContextTypes[_, _] => implicit def arrayStringEncoder[Col <: Seq[String]]: Encoder[Col] = arrayRawEncoder[String, Col](VARCHAR) - implicit def arrayBigDecimalEncoder[Col <: Seq[BigDecimal]]: Encoder[Col] = arrayEncoder[BigDecimal, Col](parseJdbcType(NUMERIC), _.bigDecimal) - implicit def arrayBooleanEncoder[Col <: Seq[Boolean]]: Encoder[Col] = arrayRawEncoder[Boolean, Col](BOOLEAN) - implicit def arrayByteEncoder[Col <: Seq[Byte]]: Encoder[Col] = arrayRawEncoder[Byte, Col](TINYINT) - implicit def arrayShortEncoder[Col <: Seq[Short]]: Encoder[Col] = arrayRawEncoder[Short, Col](SMALLINT) - implicit def arrayIntEncoder[Col <: Seq[Int]]: Encoder[Col] = arrayRawEncoder[Int, Col](INTEGER) - implicit def arrayLongEncoder[Col <: Seq[Long]]: Encoder[Col] = arrayRawEncoder[Long, Col](BIGINT) - implicit def arrayFloatEncoder[Col <: Seq[Float]]: Encoder[Col] = arrayRawEncoder[Float, Col](FLOAT) - implicit def arrayDoubleEncoder[Col <: Seq[Double]]: Encoder[Col] = arrayRawEncoder[Double, Col](DOUBLE) - implicit def arrayDateEncoder[Col <: Seq[Date]]: Encoder[Col] = arrayRawEncoder[Date, Col](TIMESTAMP) + implicit def arrayBigDecimalEncoder[Col <: Seq[BigDecimal]]: Encoder[Col] = + arrayEncoder[BigDecimal, Col](parseJdbcType(NUMERIC), _.bigDecimal) + implicit def arrayBooleanEncoder[Col <: Seq[Boolean]]: Encoder[Col] = arrayRawEncoder[Boolean, Col](BOOLEAN) + implicit def arrayByteEncoder[Col <: Seq[Byte]]: Encoder[Col] = arrayRawEncoder[Byte, Col](TINYINT) + implicit def arrayShortEncoder[Col <: Seq[Short]]: Encoder[Col] = arrayRawEncoder[Short, Col](SMALLINT) + implicit def arrayIntEncoder[Col <: Seq[Int]]: Encoder[Col] = arrayRawEncoder[Int, Col](INTEGER) + implicit def arrayLongEncoder[Col <: Seq[Long]]: Encoder[Col] = arrayRawEncoder[Long, Col](BIGINT) + implicit def arrayFloatEncoder[Col <: Seq[Float]]: Encoder[Col] = arrayRawEncoder[Float, Col](FLOAT) + implicit def arrayDoubleEncoder[Col <: Seq[Double]]: Encoder[Col] = arrayRawEncoder[Double, Col](DOUBLE) + implicit def arrayDateEncoder[Col <: Seq[Date]]: Encoder[Col] = arrayRawEncoder[Date, Col](TIMESTAMP) implicit def arrayTimestampEncoder[Col <: Seq[Timestamp]]: Encoder[Col] = arrayRawEncoder[Timestamp, Col](TIMESTAMP) - implicit def arrayLocalDateEncoder[Col <: Seq[LocalDate]]: Encoder[Col] = arrayEncoder[LocalDate, Col](parseJdbcType(DATE), SqlDate.valueOf) + implicit def arrayLocalDateEncoder[Col <: Seq[LocalDate]]: Encoder[Col] = + 
arrayEncoder[LocalDate, Col](parseJdbcType(DATE), SqlDate.valueOf) implicit def arrayUuidEncoder[Col <: Seq[UUID]]: Encoder[Col] = arrayRawEncoder[UUID, Col]("uuid") /** * Generic encoder for JDBC arrays. * - * @param jdbcType JDBC specific type identification, may be various regarding to JDBC driver - * @param mapper jdbc array accepts AnyRef objects hence a mapper is needed. - * If input type of an element of collection is not comfortable with jdbcType - * then use this mapper to transform to appropriate type before casting to AnyRef - * @tparam T element type - * @tparam Col seq type - * @return JDBC array encoder + * @param jdbcType + * JDBC specific type identification, may vary depending on the JDBC + * driver + * @param mapper + * jdbc array accepts AnyRef objects hence a mapper is needed. If the element + * type of the collection is not compatible with jdbcType then use + * this mapper to transform it to an appropriate type before casting to AnyRef + * @tparam T + * element type + * @tparam Col + * seq type + * @return + * JDBC array encoder */ - def arrayEncoder[T, Col <: Seq[T]](jdbcType: String, mapper: T => AnyRef): Encoder[Col] = { - encoder[Col](ARRAY, (idx: Index, seq: Col, row: PrepareRow) => { - val bf = implicitly[CBF[AnyRef, Array[AnyRef]]] - row.setArray( - idx, - row.getConnection.createArrayOf( - jdbcType, - seq.foldLeft(bf.newBuilder)((b, x) => b += mapper(x)).result() + def arrayEncoder[T, Col <: Seq[T]](jdbcType: String, mapper: T => AnyRef): Encoder[Col] = + encoder[Col]( + ARRAY, + (idx: Index, seq: Col, row: PrepareRow) => { + val bf = implicitly[CBF[AnyRef, Array[AnyRef]]] + row.setArray( + idx, + row.getConnection.createArrayOf( + jdbcType, + seq.foldLeft(bf.newBuilder)((b, x) => b += mapper(x)).result() + ) ) - ) - }) - } + } + ) /** - * Creates JDBC array encoder for type `T` which is already supported by database as array element. + * Creates JDBC array encoder for type `T` which is already supported by + * database as array element. 
* - * @param jdbcType JDBC specific type identification, may be various regarding to JDBC driver - * @tparam T element type - * @tparam Col seq type - * @return JDBC array encoder + * @param jdbcType + * JDBC specific type identification, may vary depending on the JDBC + * driver + * @tparam T + * element type + * @tparam Col + * seq type + * @return + * JDBC array encoder */ def arrayRawEncoder[T, Col <: Seq[T]](jdbcType: String): Encoder[Col] = arrayEncoder[T, Col](jdbcType, _.asInstanceOf[AnyRef]) /** - * Transform jdbcType int using `parseJdbcType` and calls overloaded method to create Encoder + * Transforms the jdbcType int using `parseJdbcType` and calls the overloaded + * method to create an Encoder * - * @param jdbcType java.sql.Types - * @see arrayRawEncoder(jdbcType: String) - * @see JdbcContext#parseJdbcType(jdbcType: String) + * @param jdbcType + * java.sql.Types + * @see + * arrayRawEncoder(jdbcType: String) + * @see + * JdbcContext#parseJdbcType(jdbcType: String) */ def arrayRawEncoder[T, Col <: Seq[T]](jdbcType: Int): Encoder[Col] = arrayRawEncoder[T, Col](parseJdbcType(jdbcType)) diff --git a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/BaseContexts.scala b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/BaseContexts.scala index 97cde0204..ff91c3fdc 100644 --- a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/BaseContexts.scala +++ b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/BaseContexts.scala @@ -3,27 +3,25 @@ package io.getquill.context.jdbc import io.getquill._ trait PostgresJdbcContextBase[+D <: PostgresDialect, +N <: NamingStrategy] - extends PostgresJdbcTypes[D, N] - with JdbcContextBase[D, N] + extends PostgresJdbcTypes[D, N] + with JdbcContextBase[D, N] -trait H2JdbcContextBase[+D <: H2Dialect, +N <: NamingStrategy] - extends H2JdbcTypes[D, N] - with JdbcContextBase[D, N] +trait H2JdbcContextBase[+D <: H2Dialect, +N <: NamingStrategy] extends H2JdbcTypes[D, N] with JdbcContextBase[D, N] trait MysqlJdbcContextBase[+D <: MySQLDialect, +N <: NamingStrategy] - extends MysqlJdbcTypes[D, N] - with JdbcContextBase[D, N] + extends MysqlJdbcTypes[D, N] + with JdbcContextBase[D, N] trait SqliteJdbcContextBase[+D <: SqliteDialect, +N <: NamingStrategy] - extends SqliteJdbcTypes[D, N] - with SqliteExecuteOverride[D, N] - with JdbcContextBase[D, N] + extends SqliteJdbcTypes[D, N] + with SqliteExecuteOverride[D, N] + with JdbcContextBase[D, N] trait SqlServerJdbcContextBase[+D <: SQLServerDialect, +N <: NamingStrategy] - extends SqlServerJdbcTypes[D, N] - with SqlServerExecuteOverride[N] - with JdbcContextBase[D, N] + extends SqlServerJdbcTypes[D, N] + with SqlServerExecuteOverride[N] + with JdbcContextBase[D, N] trait OracleJdbcContextBase[+D <: OracleDialect, +N <: NamingStrategy] - extends OracleJdbcTypes[D, N] - with JdbcContextBase[D, N] + extends OracleJdbcTypes[D, N] + with JdbcContextBase[D, N] diff --git a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/BooleanIntEncoding.scala b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/BooleanIntEncoding.scala index f66413e21..0d140f9ac 100644 --- a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/BooleanIntEncoding.scala +++ b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/BooleanIntEncoding.scala @@ -5,6 +5,7 @@ import java.sql.Types trait BooleanIntEncoding { this: JdbcContextTypes[_, _] => - implicit val booleanEncoder: Encoder[Boolean] = encoder(Types.TINYINT, (index, value, row) => row.setInt(index, if (value) 1 else 0)) + implicit val booleanEncoder: Encoder[Boolean] = + encoder(Types.TINYINT, 
(index, value, row) => row.setInt(index, if (value) 1 else 0)) implicit val booleanDecoder: Decoder[Boolean] = decoder((index, row, session) => row.getInt(index) == 1) } diff --git a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/Decoders.scala b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/Decoders.scala index 0d3782e23..27c9a5235 100644 --- a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/Decoders.scala +++ b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/Decoders.scala @@ -1,8 +1,8 @@ package io.getquill.context.jdbc -import java.time.{ LocalDate, LocalDateTime } +import java.time.{LocalDate, LocalDateTime} import java.util -import java.util.{ Calendar, TimeZone } +import java.util.{Calendar, TimeZone} import scala.math.BigDecimal.javaBigDecimal2bigDecimal @@ -40,96 +40,79 @@ trait Decoders { JdbcDecoder(mappedBaseDecoder(mapped, d.decoder)) implicit def optionDecoder[T](implicit d: Decoder[T]): Decoder[Option[T]] = - JdbcDecoder( - (index, row, session) => { - try { - // According to the JDBC spec, we first need to read the object before `row.wasNull` works - row.getObject(index) - if (row.wasNull()) { - None - } else { - Some(d.decoder(index, row, session)) - } - } catch { - case _: NullPointerException if row.wasNull() => None + JdbcDecoder { (index, row, session) => + try { + // According to the JDBC spec, we first need to read the object before `row.wasNull` works + row.getObject(index) + if (row.wasNull()) { + None + } else { + Some(d.decoder(index, row, session)) } + } catch { + case _: NullPointerException if row.wasNull() => None } - ) + } - implicit val sqlDateDecoder: Decoder[java.sql.Date] = decoder(_.getDate) - implicit val sqlTimeDecoder: Decoder[java.sql.Time] = decoder(_.getTime) + implicit val sqlDateDecoder: Decoder[java.sql.Date] = decoder(_.getDate) + implicit val sqlTimeDecoder: Decoder[java.sql.Time] = decoder(_.getTime) implicit val sqlTimestampDecoder: Decoder[java.sql.Timestamp] = decoder(_.getTimestamp) implicit val stringDecoder: Decoder[String] = decoder(_.getString) implicit val bigDecimalDecoder: Decoder[BigDecimal] = - decoder((index, row, session) => - row.getBigDecimal(index)) - implicit val byteDecoder: Decoder[Byte] = decoder(_.getByte) - implicit val shortDecoder: Decoder[Short] = decoder(_.getShort) - implicit val intDecoder: Decoder[Int] = decoder(_.getInt) - implicit val longDecoder: Decoder[Long] = decoder(_.getLong) - implicit val floatDecoder: Decoder[Float] = decoder(_.getFloat) - implicit val doubleDecoder: Decoder[Double] = decoder(_.getDouble) + decoder((index, row, session) => row.getBigDecimal(index)) + implicit val byteDecoder: Decoder[Byte] = decoder(_.getByte) + implicit val shortDecoder: Decoder[Short] = decoder(_.getShort) + implicit val intDecoder: Decoder[Int] = decoder(_.getInt) + implicit val longDecoder: Decoder[Long] = decoder(_.getLong) + implicit val floatDecoder: Decoder[Float] = decoder(_.getFloat) + implicit val doubleDecoder: Decoder[Double] = decoder(_.getDouble) implicit val byteArrayDecoder: Decoder[Array[Byte]] = decoder(_.getBytes) implicit val dateDecoder: Decoder[util.Date] = - decoder((index, row, session) => - new util.Date(row.getTimestamp(index, Calendar.getInstance(dateTimeZone)).getTime)) + decoder((index, row, session) => new util.Date(row.getTimestamp(index, Calendar.getInstance(dateTimeZone)).getTime)) } trait BasicTimeDecoders extends Decoders { this: JdbcContextTypes[_, _] => implicit val localDateDecoder: Decoder[LocalDate] = - decoder((index, row, session) => - 
row.getDate(index).toLocalDate) + decoder((index, row, session) => row.getDate(index).toLocalDate) implicit val localTimeDecoder: Decoder[LocalTime] = - decoder((index, row, session) => - row.getTime(index).toLocalTime) + decoder((index, row, session) => row.getTime(index).toLocalTime) implicit val localDateTimeDecoder: Decoder[LocalDateTime] = - decoder((index, row, session) => - row.getTimestamp(index).toLocalDateTime) + decoder((index, row, session) => row.getTimestamp(index).toLocalDateTime) implicit val zonedDateTimeDecoder: Decoder[ZonedDateTime] = - decoder((index, row, session) => - ZonedDateTime.ofInstant(row.getTimestamp(index).toInstant, dateTimeZone.toZoneId)) + decoder((index, row, session) => ZonedDateTime.ofInstant(row.getTimestamp(index).toInstant, dateTimeZone.toZoneId)) implicit val instantDecoder: Decoder[Instant] = - decoder((index, row, session) => - row.getTimestamp(index).toInstant) + decoder((index, row, session) => row.getTimestamp(index).toInstant) implicit val offsetTimeDecoder: Decoder[OffsetTime] = - decoder((index, row, session) => { + decoder { (index, row, session) => val utcLocalTime = row.getTime(index).toLocalTime utcLocalTime.atOffset(ZoneOffset.UTC) - }) + } implicit val offsetDateTimeDecoder: Decoder[OffsetDateTime] = - decoder((index, row, session) => - OffsetDateTime.ofInstant(row.getTimestamp(index).toInstant, dateTimeZone.toZoneId)) + decoder((index, row, session) => OffsetDateTime.ofInstant(row.getTimestamp(index).toInstant, dateTimeZone.toZoneId)) } trait ObjectGenericTimeDecoders extends Decoders { this: JdbcContextTypes[_, _] => implicit val localDateDecoder: Decoder[LocalDate] = - decoder((index, row, session) => - row.getObject(index, classOf[LocalDate])) + decoder((index, row, session) => row.getObject(index, classOf[LocalDate])) implicit val localTimeDecoder: Decoder[LocalTime] = - decoder((index, row, session) => - row.getObject(index, classOf[LocalTime])) + decoder((index, row, session) => row.getObject(index, classOf[LocalTime])) implicit val localDateTimeDecoder: Decoder[LocalDateTime] = - decoder((index, row, session) => - row.getObject(index, classOf[LocalDateTime])) + decoder((index, row, session) => row.getObject(index, classOf[LocalDateTime])) implicit val zonedDateTimeDecoder: Decoder[ZonedDateTime] = - decoder((index, row, session) => - row.getObject(index, classOf[OffsetDateTime]).toZonedDateTime) + decoder((index, row, session) => row.getObject(index, classOf[OffsetDateTime]).toZonedDateTime) implicit val instantDecoder: Decoder[Instant] = - decoder((index, row, session) => - row.getObject(index, classOf[OffsetDateTime]).toInstant) + decoder((index, row, session) => row.getObject(index, classOf[OffsetDateTime]).toInstant) implicit val offsetTimeDecoder: Decoder[OffsetTime] = - decoder((index, row, session) => - row.getObject(index, classOf[OffsetTime])) + decoder((index, row, session) => row.getObject(index, classOf[OffsetTime])) implicit val offsetDateTimeDecoder: Decoder[OffsetDateTime] = - decoder((index, row, session) => - row.getObject(index, classOf[OffsetDateTime])) + decoder((index, row, session) => row.getObject(index, classOf[OffsetDateTime])) } diff --git a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/Encoders.scala b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/Encoders.scala index daf33352f..f5bba8175 100644 --- a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/Encoders.scala +++ b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/Encoders.scala @@ -1,9 +1,9 @@ package io.getquill.context.jdbc 
-import java.sql.{ Date, Timestamp, Types } -import java.time.{ LocalDate, LocalDateTime } -import java.util.{ Calendar, TimeZone } -import java.{ sql, util } +import java.sql.{Date, Timestamp, Types} +import java.time.{LocalDate, LocalDateTime} +import java.util.{Calendar, TimeZone} +import java.{sql, util} // Needed as an import in Protoquill but not in Scala2 Quill. Not sure why import io.getquill.MappedEncoding @@ -33,10 +33,13 @@ trait Encoders extends EncodingDsl { } def encoder[T](sqlType: Int, f: (Index, T, PrepareRow) => Unit): Encoder[T] = - JdbcEncoder(sqlType, (index: Index, value: T, row: PrepareRow, session: Session) => { - f(index, value, row) - row - }) + JdbcEncoder( + sqlType, + (index: Index, value: T, row: PrepareRow, session: Session) => { + f(index, value, row) + row + } + ) def encoder[T](sqlType: Int, f: PrepareRow => (Index, T) => Unit): Encoder[T] = encoder(sqlType, (index: Index, value: T, row: PrepareRow) => f(row)(index, value)) @@ -57,96 +60,98 @@ trait Encoders extends EncodingDsl { ) implicit val sqlDateEncoder: Encoder[java.sql.Date] = - encoder(Types.DATE, (index, value, row) => - row.setDate(index, value)) + encoder(Types.DATE, (index, value, row) => row.setDate(index, value)) implicit val sqlTimeEncoder: Encoder[java.sql.Time] = - encoder(Types.TIME, (index, value, row) => - row.setTime(index, value)) + encoder(Types.TIME, (index, value, row) => row.setTime(index, value)) implicit val sqlTimestampEncoder: Encoder[java.sql.Timestamp] = - encoder(Types.TIMESTAMP, (index, value, row) => - row.setTimestamp(index, value)) + encoder(Types.TIMESTAMP, (index, value, row) => row.setTimestamp(index, value)) implicit val stringEncoder: Encoder[String] = - encoder(Types.VARCHAR, (row: PreparedStatement) => (i: Index, t: String) => row.setString(i, t)) + encoder(Types.VARCHAR, (row: PreparedStatement) => (i: Index, t: String) => row.setString(i, t)) implicit val nullEncoder: Encoder[Null] = - encoder(Types.NULL, (row: PreparedStatement) => (i: Index, t: String) => row.setNull(i, Types.NULL)) + encoder(Types.NULL, (row: PreparedStatement) => (i: Index, t: String) => row.setNull(i, Types.NULL)) implicit val bigDecimalEncoder: Encoder[BigDecimal] = encoder(Types.NUMERIC, (index, value, row) => row.setBigDecimal(index, value.bigDecimal)) - implicit val byteEncoder: Encoder[Byte] = encoder(Types.TINYINT, _.setByte) - implicit val shortEncoder: Encoder[Short] = encoder(Types.SMALLINT, _.setShort) - implicit val intEncoder: Encoder[Int] = encoder(Types.INTEGER, _.setInt) - implicit val longEncoder: Encoder[Long] = encoder(Types.BIGINT, _.setLong) - implicit val floatEncoder: Encoder[Float] = encoder(Types.FLOAT, _.setFloat) - implicit val doubleEncoder: Encoder[Double] = encoder(Types.DOUBLE, _.setDouble) + implicit val byteEncoder: Encoder[Byte] = encoder(Types.TINYINT, _.setByte) + implicit val shortEncoder: Encoder[Short] = encoder(Types.SMALLINT, _.setShort) + implicit val intEncoder: Encoder[Int] = encoder(Types.INTEGER, _.setInt) + implicit val longEncoder: Encoder[Long] = encoder(Types.BIGINT, _.setLong) + implicit val floatEncoder: Encoder[Float] = encoder(Types.FLOAT, _.setFloat) + implicit val doubleEncoder: Encoder[Double] = encoder(Types.DOUBLE, _.setDouble) implicit val byteArrayEncoder: Encoder[Array[Byte]] = encoder(Types.VARBINARY, _.setBytes) implicit val dateEncoder: Encoder[util.Date] = - encoder(Types.TIMESTAMP, (index, value, row) => - row.setTimestamp(index, new sql.Timestamp(value.getTime), Calendar.getInstance(dateTimeZone))) + encoder( + 
Types.TIMESTAMP, + (index, value, row) => + row.setTimestamp(index, new sql.Timestamp(value.getTime), Calendar.getInstance(dateTimeZone)) + ) } trait BasicTimeEncoders extends Encoders { this: JdbcContextTypes[_, _] => implicit val localDateEncoder: Encoder[LocalDate] = - encoder(Types.DATE, (index, value, row) => - row.setDate(index, java.sql.Date.valueOf(value))) + encoder(Types.DATE, (index, value, row) => row.setDate(index, java.sql.Date.valueOf(value))) implicit val localTimeEncoder: Encoder[LocalTime] = - encoder(Types.TIME, (index, value, row) => - row.setTime(index, java.sql.Time.valueOf(value))) + encoder(Types.TIME, (index, value, row) => row.setTime(index, java.sql.Time.valueOf(value))) implicit val localDateTimeEncoder: Encoder[LocalDateTime] = - encoder(Types.TIMESTAMP, (index, value, row) => - row.setTimestamp(index, java.sql.Timestamp.valueOf(value))) + encoder(Types.TIMESTAMP, (index, value, row) => row.setTimestamp(index, java.sql.Timestamp.valueOf(value))) implicit val zonedDateTimeEncoder: Encoder[ZonedDateTime] = - encoder(Types.TIMESTAMP_WITH_TIMEZONE, (index, value, row) => - row.setTimestamp(index, Timestamp.from(value.toInstant))) + encoder( + Types.TIMESTAMP_WITH_TIMEZONE, + (index, value, row) => row.setTimestamp(index, Timestamp.from(value.toInstant)) + ) implicit val instantEncoder: Encoder[Instant] = - encoder(Types.TIMESTAMP_WITH_TIMEZONE, (index, value, row) => - row.setTimestamp(index, Timestamp.from(value))) + encoder(Types.TIMESTAMP_WITH_TIMEZONE, (index, value, row) => row.setTimestamp(index, Timestamp.from(value))) implicit val offseTimeEncoder: Encoder[OffsetTime] = - encoder(Types.TIME, (index, value, row) => - row.setTime(index, java.sql.Time.valueOf(value.withOffsetSameInstant(ZoneOffset.UTC).toLocalTime))) + encoder( + Types.TIME, + (index, value, row) => + row.setTime(index, java.sql.Time.valueOf(value.withOffsetSameInstant(ZoneOffset.UTC).toLocalTime)) + ) implicit val offseDateTimeEncoder: Encoder[OffsetDateTime] = - encoder(Types.TIMESTAMP_WITH_TIMEZONE, (index, value, row) => - row.setTimestamp(index, java.sql.Timestamp.from(value.toInstant))) + encoder( + Types.TIMESTAMP_WITH_TIMEZONE, + (index, value, row) => row.setTimestamp(index, java.sql.Timestamp.from(value.toInstant)) + ) } -/** Encoders for reasonably implemented JDBC contexts that meet the 4.2 specification */ +/** + * Encoders for reasonably implemented JDBC contexts that meet the 4.2 + * specification + */ trait ObjectGenericTimeEncoders extends Encoders { this: JdbcContextTypes[_, _] => - protected def jdbcTypeOfLocalDate = Types.DATE - protected def jdbcTypeOfLocalTime = Types.TIME + protected def jdbcTypeOfLocalDate = Types.DATE + protected def jdbcTypeOfLocalTime = Types.TIME protected def jdbcTypeOfLocalDateTime = Types.TIMESTAMP protected def jdbcTypeOfZonedDateTime = Types.TIMESTAMP_WITH_TIMEZONE protected def jdbcEncodeInstant(value: Instant): Any = value.atOffset(ZoneOffset.UTC) - protected def jdbcTypeOfInstant = Types.TIMESTAMP_WITH_TIMEZONE - protected def jdbcTypeOfOffsetTime = Types.TIME_WITH_TIMEZONE - protected def jdbcTypeOfOffsetDateTime = Types.TIMESTAMP_WITH_TIMEZONE + protected def jdbcTypeOfInstant = Types.TIMESTAMP_WITH_TIMEZONE + protected def jdbcTypeOfOffsetTime = Types.TIME_WITH_TIMEZONE + protected def jdbcTypeOfOffsetDateTime = Types.TIMESTAMP_WITH_TIMEZONE implicit val localDateEncoder: Encoder[LocalDate] = - encoder(jdbcTypeOfLocalDate, (index, value, row) => - row.setObject(index, value, jdbcTypeOfLocalDate)) + encoder(jdbcTypeOfLocalDate, (index, 
value, row) => row.setObject(index, value, jdbcTypeOfLocalDate)) implicit val localTimeEncoder: Encoder[LocalTime] = - encoder(jdbcTypeOfLocalTime, (index, value, row) => - row.setObject(index, value, jdbcTypeOfLocalTime)) + encoder(jdbcTypeOfLocalTime, (index, value, row) => row.setObject(index, value, jdbcTypeOfLocalTime)) implicit val localDateTimeEncoder: Encoder[LocalDateTime] = - encoder(jdbcTypeOfLocalDateTime, (index, value, row) => - row.setObject(index, value, jdbcTypeOfLocalDateTime)) + encoder(jdbcTypeOfLocalDateTime, (index, value, row) => row.setObject(index, value, jdbcTypeOfLocalDateTime)) implicit val zonedDateTimeEncoder: Encoder[ZonedDateTime] = - encoder(jdbcTypeOfZonedDateTime, (index, value, row) => - row.setObject(index, value.toOffsetDateTime, jdbcTypeOfZonedDateTime)) + encoder( + jdbcTypeOfZonedDateTime, + (index, value, row) => row.setObject(index, value.toOffsetDateTime, jdbcTypeOfZonedDateTime) + ) implicit val instantEncoder: Encoder[Instant] = - encoder(jdbcTypeOfInstant, (index, value, row) => - row.setObject(index, jdbcEncodeInstant(value), jdbcTypeOfInstant)) + encoder(jdbcTypeOfInstant, (index, value, row) => row.setObject(index, jdbcEncodeInstant(value), jdbcTypeOfInstant)) implicit val offseTimeEncoder: Encoder[OffsetTime] = - encoder(jdbcTypeOfOffsetTime, (index, value, row) => - row.setObject(index, value, jdbcTypeOfOffsetTime)) + encoder(jdbcTypeOfOffsetTime, (index, value, row) => row.setObject(index, value, jdbcTypeOfOffsetTime)) implicit val offseDateTimeEncoder: Encoder[OffsetDateTime] = - encoder(jdbcTypeOfOffsetDateTime, (index, value, row) => - row.setObject(index, value, jdbcTypeOfOffsetDateTime)) + encoder(jdbcTypeOfOffsetDateTime, (index, value, row) => row.setObject(index, value, jdbcTypeOfOffsetDateTime)) } diff --git a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContext.scala b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContext.scala index c85a4e8c4..64ce295f2 100644 --- a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContext.scala +++ b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContext.scala @@ -1,13 +1,13 @@ package io.getquill.context.jdbc import java.io.Closeable -import java.sql.{ Connection, PreparedStatement } +import java.sql.{Connection, PreparedStatement} import javax.sql.DataSource import io.getquill.context.sql.idiom.SqlIdiom import io.getquill._ -import io.getquill.context.{ ExecutionInfo, ProtoContextSecundus, ContextVerbTranslate } +import io.getquill.context.{ExecutionInfo, ProtoContextSecundus, ContextVerbTranslate} -import scala.util.{ DynamicVariable, Try } +import scala.util.{DynamicVariable, Try} import scala.util.control.NonFatal import io.getquill.Quoted import scala.annotation.targetName @@ -15,32 +15,32 @@ import io.getquill.context.ContextVerbTranslate import io.getquill.util.ContextLogger abstract class JdbcContext[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] - extends JdbcContextBase[Dialect, Naming] - with ProtoContextSecundus[Dialect, Naming] - with ContextVerbTranslate[Dialect, Naming] - { + extends JdbcContextBase[Dialect, Naming] + with ProtoContextSecundus[Dialect, Naming] + with ContextVerbTranslate[Dialect, Naming] { private val logger = ContextLogger(classOf[JdbcContext[_, _]]) // Need to override these with same values as JdbcRunContext because SyncIOMonad imports them. 
The imported values need to be overridden - override type Result[T] = T - override type RunQueryResult[T] = List[T] - override type RunQuerySingleResult[T] = T - override type RunActionResult = Long - override type RunActionReturningResult[T] = T - override type RunBatchActionResult = List[Long] + override type Result[T] = T + override type RunQueryResult[T] = List[T] + override type RunQuerySingleResult[T] = T + override type RunActionResult = Long + override type RunActionReturningResult[T] = T + override type RunBatchActionResult = List[Long] override type RunBatchActionReturningResult[T] = List[T] - override type Runner = Unit - override type TranslateRunner = Unit + override type Runner = Unit + override type TranslateRunner = Unit override protected def context: Runner = () - def translateContext: TranslateRunner = () + def translateContext: TranslateRunner = () val dataSource: DataSource @targetName("runQueryDefault") inline def run[T](inline quoted: Quoted[Query[T]]): List[T] = InternalApi.runQueryDefault(quoted) @targetName("runQuery") - inline def run[T](inline quoted: Quoted[Query[T]], inline wrap: OuterSelectWrap): List[T] = InternalApi.runQuery(quoted, wrap) + inline def run[T](inline quoted: Quoted[Query[T]], inline wrap: OuterSelectWrap): List[T] = + InternalApi.runQuery(quoted, wrap) @targetName("runQuerySingle") inline def run[T](inline quoted: Quoted[T]): T = InternalApi.runQuerySingle(quoted) @targetName("runAction") @@ -48,19 +48,26 @@ abstract class JdbcContext[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] @targetName("runActionReturning") inline def run[E, T](inline quoted: Quoted[ActionReturning[E, T]]): T = InternalApi.runActionReturning[E, T](quoted) @targetName("runActionReturningMany") - inline def run[E, T](inline quoted: Quoted[ActionReturning[E, List[T]]]): List[T] = InternalApi.runActionReturningMany[E, T](quoted) + inline def run[E, T](inline quoted: Quoted[ActionReturning[E, List[T]]]): List[T] = + InternalApi.runActionReturningMany[E, T](quoted) @targetName("runBatchAction") - inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]], rowsPerBatch: Int): List[Long] = InternalApi.runBatchAction(quoted, rowsPerBatch) + inline def run[I, A <: Action[I] & QAC[I, Nothing]]( + inline quoted: Quoted[BatchAction[A]], + rowsPerBatch: Int + ): List[Long] = InternalApi.runBatchAction(quoted, rowsPerBatch) @targetName("runBatchActionDefault") - inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): List[Long] = InternalApi.runBatchAction(quoted, 1) + inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): List[Long] = + InternalApi.runBatchAction(quoted, 1) @targetName("runBatchActionReturning") - inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]], rowsPerBatch: Int): List[T] = InternalApi.runBatchActionReturning(quoted, rowsPerBatch) + inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]], rowsPerBatch: Int): List[T] = + InternalApi.runBatchActionReturning(quoted, rowsPerBatch) @targetName("runBatchActionReturningDefault") - inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]]): List[T] = InternalApi.runBatchActionReturning(quoted, 1) + inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]]): List[T] = + InternalApi.runBatchActionReturning(quoted, 1) - override def wrap[T](t: => T): T = t + override def wrap[T](t: => T): T = t 
override def push[A, B](result: A)(f: A => B): B = f(result) - override def seq[A](list: List[A]): List[A] = list + override def seq[A](list: List[A]): List[A] = list protected val currentConnection = new DynamicVariable[Option[Connection]](None) @@ -76,7 +83,9 @@ abstract class JdbcContext[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] case closeable: java.io.Closeable => closeable.close() case _ => - logger.underlying.warn(s"Could not close the DataSource `$dataSource`. It is not an instance of java.io.Closeable.") + logger.underlying.warn( + s"Could not close the DataSource `$dataSource`. It is not an instance of java.io.Closeable." + ) } def probe(sql: String) = @@ -106,9 +115,8 @@ abstract class JdbcContext[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] } } - override private[getquill] def prepareParams(statement: String, prepare: Prepare): Seq[String] = { + override private[getquill] def prepareParams(statement: String, prepare: Prepare): Seq[String] = withConnectionWrapped { conn => prepare(conn.prepareStatement(statement), conn)._1.reverse.map(prepareParam) } - } } diff --git a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContextBase.scala b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContextBase.scala index f1e2ba1c2..dae5c92d6 100644 --- a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContextBase.scala +++ b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContextBase.scala @@ -2,22 +2,24 @@ package io.getquill.context.jdbc import io.getquill._ import io.getquill.context.sql.idiom.SqlIdiom -import io.getquill.context.{ ExecutionInfo, ContextVerbPrepare, ContextVerbPrepareLambda } +import io.getquill.context.{ExecutionInfo, ContextVerbPrepare, ContextVerbPrepareLambda} import java.sql._ import io.getquill.util.ContextLogger trait JdbcContextBase[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] - extends JdbcContextVerbExecute[Dialect, Naming] - with JdbcContextVerbPrepare[Dialect, Naming] - with ContextVerbPrepareLambda[Dialect, Naming] { + extends JdbcContextVerbExecute[Dialect, Naming] + with JdbcContextVerbPrepare[Dialect, Naming] + with ContextVerbPrepareLambda[Dialect, Naming] { // Need to re-define these here or they conflict with staged-prepare imported types - override type PrepareQueryResult = Connection => Result[PreparedStatement] - override type PrepareActionResult = Connection => Result[PreparedStatement] + override type PrepareQueryResult = Connection => Result[PreparedStatement] + override type PrepareActionResult = Connection => Result[PreparedStatement] override type PrepareBatchActionResult = Connection => Result[List[PreparedStatement]] - def constructPrepareQuery(f: Connection => Result[PreparedStatement]): Connection => Result[PreparedStatement] = f + def constructPrepareQuery(f: Connection => Result[PreparedStatement]): Connection => Result[PreparedStatement] = f def constructPrepareAction(f: Connection => Result[PreparedStatement]): Connection => Result[PreparedStatement] = f - def constructPrepareBatchAction(f: Connection => Result[List[PreparedStatement]]): Connection => Result[List[PreparedStatement]] = f + def constructPrepareBatchAction( + f: Connection => Result[List[PreparedStatement]] + ): Connection => Result[List[PreparedStatement]] = f } diff --git a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContextTypes.scala b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContextTypes.scala index 92d07befd..c6a2c7ef8 100644 --- 
a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContextTypes.scala +++ b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContextTypes.scala @@ -1,46 +1,49 @@ package io.getquill.context.jdbc -import io.getquill.{ NamingStrategy, ReturnAction } -import io.getquill.ReturnAction.{ ReturnColumns, ReturnNothing, ReturnRecord } -import io.getquill.context.{ Context, ExecutionInfo } +import io.getquill.{NamingStrategy, ReturnAction} +import io.getquill.ReturnAction.{ReturnColumns, ReturnNothing, ReturnRecord} +import io.getquill.context.{Context, ExecutionInfo} import io.getquill.context.sql.SqlContext import io.getquill.context.sql.idiom.SqlIdiom import io.getquill.util.ContextLogger -import java.sql.{ Connection, JDBCType, PreparedStatement, ResultSet, Statement } +import java.sql.{Connection, JDBCType, PreparedStatement, ResultSet, Statement} import java.util.TimeZone -trait JdbcContextTypes[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] extends Context[Dialect, Naming] - with SqlContext[Dialect, Naming] - with Encoders - with Decoders { +trait JdbcContextTypes[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] + extends Context[Dialect, Naming] + with SqlContext[Dialect, Naming] + with Encoders + with Decoders { // Dotty doesn't like that this is defined in both Encoders and Decoders. // Makes us define it here in order to resolve the conflict. - type Index = Int + type Index = Int type PrepareRow = PreparedStatement - type ResultRow = ResultSet - type Session = Connection - type Runner = Unit + type ResultRow = ResultSet + type Session = Connection + type Runner = Unit override type NullChecker = JdbcNullChecker class JdbcNullChecker extends BaseNullChecker { - override def apply(index: Int, row: ResultSet): Boolean = { + override def apply(index: Int, row: ResultSet): Boolean = // Note that JDBC-rows are 1-indexed row.getObject(index + 1) == null - } } implicit val nullChecker: JdbcNullChecker = new JdbcNullChecker() protected val dateTimeZone = TimeZone.getDefault /** - * Parses instances of java.sql.Types to string form so it can be used in creation of sql arrays. - * Some databases does not support each of generic types, hence it's welcome to override this method - * and provide alternatives to non-existent types. + * Parses instances of java.sql.Types to string form so it can be used in + * creation of sql arrays. Some databases do not support every generic + * type, hence you are welcome to override this method and provide alternatives + * for unsupported types. 
* - * @param intType one of java.sql.Types - * @return JDBC type in string form + * @param intType + * one of java.sql.Types + * @return + * JDBC type in string form */ def parseJdbcType(intType: Int): String = JDBCType.valueOf(intType).getName -} \ No newline at end of file +} diff --git a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContextVerbExecute.scala b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContextVerbExecute.scala index d77021f41..c4efa9bd0 100644 --- a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContextVerbExecute.scala +++ b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContextVerbExecute.scala @@ -1,25 +1,26 @@ package io.getquill.context.jdbc -import io.getquill.{ NamingStrategy, ReturnAction } -import io.getquill.ReturnAction.{ ReturnColumns, ReturnNothing, ReturnRecord } -import io.getquill.context.{ Context, ExecutionInfo } +import io.getquill.{NamingStrategy, ReturnAction} +import io.getquill.ReturnAction.{ReturnColumns, ReturnNothing, ReturnRecord} +import io.getquill.context.{Context, ExecutionInfo} import io.getquill.context.sql.SqlContext import io.getquill.context.sql.idiom.SqlIdiom import io.getquill.util.ContextLogger -import java.sql.{ Connection, JDBCType, PreparedStatement, ResultSet, Statement } +import java.sql.{Connection, JDBCType, PreparedStatement, ResultSet, Statement} import java.util.TimeZone -trait JdbcContextVerbExecute[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] extends JdbcContextTypes[Dialect, Naming] { +trait JdbcContextVerbExecute[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] + extends JdbcContextTypes[Dialect, Naming] { // These type overrides are not required for JdbcRunContext in Scala2-Quill but it's a typing error. It only works // because executeQuery is not actually defined in Context.scala therefore typing doesn't have // to be correct on the base-level. 
Same issue with RunActionResult and others - override type RunQueryResult[T] = List[T] - override type RunQuerySingleResult[T] = T - override type RunActionResult = Long - override type RunActionReturningResult[T] = T - override type RunBatchActionResult = List[Long] + override type RunQueryResult[T] = List[T] + override type RunQuerySingleResult[T] = T + override type RunActionResult = Long + override type RunActionReturningResult[T] = T + override type RunBatchActionResult = List[Long] override type RunBatchActionReturningResult[T] = List[T] private val logger = ContextLogger(classOf[JdbcContextVerbExecute[_, _]]) @@ -33,7 +34,10 @@ trait JdbcContextVerbExecute[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] ex withConnection(conn => wrap(f(conn))) // Not overridden in JdbcRunContext in Scala2-Quill because this method is not defined in the context - override def executeAction(sql: String, prepare: Prepare = identityPrepare)(info: ExecutionInfo, dc: Runner): Result[Long] = + override def executeAction( + sql: String, + prepare: Prepare = identityPrepare + )(info: ExecutionInfo, dc: Runner): Result[Long] = withConnectionWrapped { conn => val (params, ps) = prepare(conn.prepareStatement(sql), conn) logger.logQuery(sql, params) @@ -41,7 +45,11 @@ trait JdbcContextVerbExecute[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] ex } // Not overridden in JdbcRunContext in Scala2-Quill because this method is not defined in the context - override def executeQuery[T](sql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)(info: ExecutionInfo, dc: Runner): Result[List[T]] = + override def executeQuery[T]( + sql: String, + prepare: Prepare = identityPrepare, + extractor: Extractor[T] = identityExtractor + )(info: ExecutionInfo, dc: Runner): Result[List[T]] = withConnectionWrapped { conn => val (params, ps) = prepare(conn.prepareStatement(sql), conn) logger.logQuery(sql, params) @@ -50,15 +58,29 @@ trait JdbcContextVerbExecute[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] ex } // Not overridden in JdbcRunContext in Scala2-Quill because this method is not defined in the context - override def executeQuerySingle[T](sql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)(info: ExecutionInfo, dc: Runner): Result[T] = + override def executeQuerySingle[T]( + sql: String, + prepare: Prepare = identityPrepare, + extractor: Extractor[T] = identityExtractor + )(info: ExecutionInfo, dc: Runner): Result[T] = handleSingleWrappedResult(sql, executeQuery(sql, prepare, extractor)(info, dc)) // Not overridden in JdbcRunContext in Scala2-Quill because this method is not defined in the context - override def executeActionReturning[O](sql: String, prepare: Prepare = identityPrepare, extractor: Extractor[O], returningBehavior: ReturnAction)(info: ExecutionInfo, dc: Runner): Result[O] = + override def executeActionReturning[O]( + sql: String, + prepare: Prepare = identityPrepare, + extractor: Extractor[O], + returningBehavior: ReturnAction + )(info: ExecutionInfo, dc: Runner): Result[O] = push(executeActionReturningMany(sql, prepare, extractor, returningBehavior)(info, dc))(handleSingleResult(sql, _)) // Not overridden in JdbcRunContext in Scala2-Quill because this method is not defined in the context - override def executeActionReturningMany[O](sql: String, prepare: Prepare = identityPrepare, extractor: Extractor[O], returningBehavior: ReturnAction)(info: ExecutionInfo, dc: Runner): Result[List[O]] = + override def 
executeActionReturningMany[O]( + sql: String, + prepare: Prepare = identityPrepare, + extractor: Extractor[O], + returningBehavior: ReturnAction + )(info: ExecutionInfo, dc: Runner): Result[List[O]] = withConnectionWrapped { conn => val (params, ps) = prepare(prepareWithReturning(sql, conn, returningBehavior), conn) logger.logQuery(sql, params) @@ -75,32 +97,33 @@ trait JdbcContextVerbExecute[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] ex def executeBatchAction(groups: List[BatchGroup])(info: ExecutionInfo, dc: Runner): Result[List[Long]] = withConnectionWrapped { conn => - groups.flatMap { - case BatchGroup(sql, prepare) => - val ps = conn.prepareStatement(sql) - //logger.underlying.debug("Batch: {}", sql.take(200) + (if (sql.length > 200) "..." else "")) - prepare.foreach { f => - val (params, _) = f(ps, conn) - logger.logBatchItem(sql, params) - ps.addBatch() - } - ps.executeBatch().map(_.toLong) + groups.flatMap { case BatchGroup(sql, prepare) => + val ps = conn.prepareStatement(sql) + // logger.underlying.debug("Batch: {}", sql.take(200) + (if (sql.length > 200) "..." else "")) + prepare.foreach { f => + val (params, _) = f(ps, conn) + logger.logBatchItem(sql, params) + ps.addBatch() + } + ps.executeBatch().map(_.toLong) } } - def executeBatchActionReturning[T](groups: List[BatchGroupReturning], extractor: Extractor[T])(info: ExecutionInfo, dc: Runner): Result[List[T]] = + def executeBatchActionReturning[T]( + groups: List[BatchGroupReturning], + extractor: Extractor[T] + )(info: ExecutionInfo, dc: Runner): Result[List[T]] = withConnectionWrapped { conn => - groups.flatMap { - case BatchGroupReturning(sql, returningBehavior, prepare) => - val ps = prepareWithReturning(sql, conn, returningBehavior) - logger.underlying.debug("Batch: {}", sql) - prepare.foreach { f => - val (params, _) = f(ps, conn) - logger.logBatchItem(sql, params) - ps.addBatch() - } - ps.executeBatch() - extractResult(ps.getGeneratedKeys, conn, extractor) + groups.flatMap { case BatchGroupReturning(sql, returningBehavior, prepare) => + val ps = prepareWithReturning(sql, conn, returningBehavior) + logger.underlying.debug("Batch: {}", sql) + prepare.foreach { f => + val (params, _) = f(ps, conn) + logger.logBatchItem(sql, params) + ps.addBatch() + } + ps.executeBatch() + extractResult(ps.getGeneratedKeys, conn, extractor) } } diff --git a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContextVerbPrepare.scala b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContextVerbPrepare.scala index 7b36b52b7..36c2c5685 100644 --- a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContextVerbPrepare.scala +++ b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/JdbcContextVerbPrepare.scala @@ -2,18 +2,17 @@ package io.getquill.context.jdbc import io.getquill._ import io.getquill.context.sql.idiom.SqlIdiom -import io.getquill.context.{ ExecutionInfo, ContextVerbPrepare, ContextVerbPrepareLambda } +import io.getquill.context.{ExecutionInfo, ContextVerbPrepare, ContextVerbPrepareLambda} import java.sql._ import io.getquill.util.ContextLogger - trait JdbcContextVerbPrepare[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] - extends ContextVerbPrepare[Dialect, Naming] - with JdbcContextTypes[Dialect, Naming] { + extends ContextVerbPrepare[Dialect, Naming] + with JdbcContextTypes[Dialect, Naming] { - override type PrepareQueryResult = Connection => Result[PreparedStatement] - override type PrepareActionResult = Connection => Result[PreparedStatement] + override type PrepareQueryResult = Connection => 
Result[PreparedStatement] + override type PrepareActionResult = Connection => Result[PreparedStatement] override type PrepareBatchActionResult = Connection => Result[List[PreparedStatement]] def constructPrepareQuery(f: Connection => Result[PreparedStatement]): PrepareQueryResult @@ -26,33 +25,40 @@ trait JdbcContextVerbPrepare[+Dialect <: SqlIdiom, +Naming <: NamingStrategy] def push[A, B](result: Result[A])(f: A => B): Result[B] def seq[A](list: List[Result[A]]): Result[List[A]] - def prepareQuery(sql: String, prepare: Prepare = identityPrepare)(executionInfo: ExecutionInfo, dc: Runner): PrepareQueryResult = + def prepareQuery(sql: String, prepare: Prepare = identityPrepare)( + executionInfo: ExecutionInfo, + dc: Runner + ): PrepareQueryResult = constructPrepareQuery(prepareSingle(sql, prepare)(executionInfo, dc)) - def prepareAction(sql: String, prepare: Prepare = identityPrepare)(executionInfo: ExecutionInfo, dc: Runner): PrepareActionResult = + def prepareAction(sql: String, prepare: Prepare = identityPrepare)( + executionInfo: ExecutionInfo, + dc: Runner + ): PrepareActionResult = constructPrepareAction(prepareSingle(sql, prepare)(executionInfo, dc)) - def prepareSingle(sql: String, prepare: Prepare = identityPrepare)(executionInfo: ExecutionInfo, dc: Runner): Connection => Result[PreparedStatement] = - (conn: Connection) => wrap { - val (params, ps) = prepare(conn.prepareStatement(sql), conn) - logger.logQuery(sql, params) - ps - } + def prepareSingle( + sql: String, + prepare: Prepare = identityPrepare + )(executionInfo: ExecutionInfo, dc: Runner): Connection => Result[PreparedStatement] = + (conn: Connection) => + wrap { + val (params, ps) = prepare(conn.prepareStatement(sql), conn) + logger.logQuery(sql, params) + ps + } def prepareBatchAction(groups: List[BatchGroup])(executionInfo: ExecutionInfo, dc: Runner): PrepareBatchActionResult = - constructPrepareBatchAction { - (session: Connection) => - seq { - val batches = groups.flatMap { - case BatchGroup(sql, prepares) => - prepares.map(sql -> _) - } - batches.map { - case (sql, prepare) => - val prepareSql = prepareSingle(sql, prepare)(executionInfo, dc) - prepareSql(session) - } + constructPrepareBatchAction { (session: Connection) => + seq { + val batches = groups.flatMap { case BatchGroup(sql, prepares) => + prepares.map(sql -> _) + } + batches.map { case (sql, prepare) => + val prepareSql = prepareSingle(sql, prepare)(executionInfo, dc) + prepareSql(session) } + } } } diff --git a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/ResultSetExtractor.scala b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/ResultSetExtractor.scala index fb72c35b5..000450a58 100644 --- a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/ResultSetExtractor.scala +++ b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/ResultSetExtractor.scala @@ -7,11 +7,20 @@ import scala.annotation.tailrec // TODO In the Scala 2 Quill, move this module to portable object ResultSetExtractor { - private[getquill] final def apply[T](rs: ResultSet, conn: Connection, extractor: (ResultSet, Connection) => T): List[T] = + private[getquill] final def apply[T]( + rs: ResultSet, + conn: Connection, + extractor: (ResultSet, Connection) => T + ): List[T] = extractResult(rs, conn, extractor, List()) @tailrec - private[getquill] final def extractResult[T](rs: ResultSet, conn: Connection, extractor: (ResultSet, Connection) => T, acc: List[T]): List[T] = + private[getquill] final def extractResult[T]( + rs: ResultSet, + conn: Connection, + extractor: (ResultSet, 
Connection) => T, + acc: List[T] + ): List[T] = if (rs.next) extractResult(rs, conn, extractor, extractor(rs, conn) :: acc) else diff --git a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/SimplifiedContexts.scala b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/SimplifiedContexts.scala index b2511b385..65dab40b7 100644 --- a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/SimplifiedContexts.scala +++ b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/SimplifiedContexts.scala @@ -1,17 +1,18 @@ package io.getquill.context.jdbc -import java.sql.{ Connection, Types } +import java.sql.{Connection, Types} import io.getquill._ import io.getquill.context.ExecutionInfo import io.getquill.util.ContextLogger -trait PostgresJdbcTypes[+D <: PostgresDialect, +N <: NamingStrategy] extends JdbcContextTypes[D, N] - with ObjectGenericTimeEncoders - with ObjectGenericTimeDecoders - with BooleanObjectEncoding - with UUIDObjectEncoding - with ArrayDecoders - with ArrayEncoders { +trait PostgresJdbcTypes[+D <: PostgresDialect, +N <: NamingStrategy] + extends JdbcContextTypes[D, N] + with ObjectGenericTimeEncoders + with ObjectGenericTimeDecoders + with BooleanObjectEncoding + with UUIDObjectEncoding + with ArrayDecoders + with ArrayEncoders { val idiom: D @@ -26,34 +27,37 @@ trait PostgresJdbcTypes[+D <: PostgresDialect, +N <: NamingStrategy] extends Jdb } } -trait H2JdbcTypes[+D <: H2Dialect, +N <: NamingStrategy] extends JdbcContextTypes[D, N] - with ObjectGenericTimeEncoders - with ObjectGenericTimeDecoders - with BooleanObjectEncoding - with UUIDObjectEncoding { +trait H2JdbcTypes[+D <: H2Dialect, +N <: NamingStrategy] + extends JdbcContextTypes[D, N] + with ObjectGenericTimeEncoders + with ObjectGenericTimeDecoders + with BooleanObjectEncoding + with UUIDObjectEncoding { val idiom: D } -trait MysqlJdbcTypes[+D <: MySQLDialect, +N <: NamingStrategy] extends JdbcContextTypes[D, N] - with ObjectGenericTimeEncoders - with ObjectGenericTimeDecoders - with BooleanObjectEncoding - with UUIDStringEncoding { +trait MysqlJdbcTypes[+D <: MySQLDialect, +N <: NamingStrategy] + extends JdbcContextTypes[D, N] + with ObjectGenericTimeEncoders + with ObjectGenericTimeDecoders + with BooleanObjectEncoding + with UUIDStringEncoding { - protected override def jdbcTypeOfZonedDateTime = Types.TIMESTAMP - protected override def jdbcTypeOfInstant = Types.TIMESTAMP - protected override def jdbcTypeOfOffsetTime = Types.TIME + protected override def jdbcTypeOfZonedDateTime = Types.TIMESTAMP + protected override def jdbcTypeOfInstant = Types.TIMESTAMP + protected override def jdbcTypeOfOffsetTime = Types.TIME protected override def jdbcTypeOfOffsetDateTime = Types.TIMESTAMP val idiom: D } -trait SqliteJdbcTypes[+D <: SqliteDialect, +N <: NamingStrategy] extends JdbcContextTypes[D, N] - with BasicTimeEncoders - with BasicTimeDecoders - with BooleanObjectEncoding - with UUIDObjectEncoding { +trait SqliteJdbcTypes[+D <: SqliteDialect, +N <: NamingStrategy] + extends JdbcContextTypes[D, N] + with BasicTimeEncoders + with BasicTimeDecoders + with BooleanObjectEncoding + with UUIDObjectEncoding { val idiom: D } @@ -77,23 +81,25 @@ trait SqliteExecuteOverride[+D <: SqliteDialect, +N <: NamingStrategy] extends J conn.setAutoCommit(wasAutoCommit) } - override def executeBatchActionReturning[T](groups: List[BatchGroupReturning], extractor: Extractor[T])(info: ExecutionInfo, dc: Runner): Result[List[T]] = + override def executeBatchActionReturning[T]( + groups: List[BatchGroupReturning], + extractor: Extractor[T] + )(info: 
ExecutionInfo, dc: Runner): Result[List[T]] = withConnectionWrapped { conn => logger.underlying.warn( "Sqlite does not support Batch-Actions with returning-keys. Quill will attempt to emulate this function with single-row inserts inside a transaction but using this API is not recommended." ) - groups.flatMap { - case BatchGroupReturning(sql, returningBehavior, prepare) => - val ps = conn.prepareStatement(sql, java.sql.Statement.RETURN_GENERATED_KEYS) - logger.underlying.debug("Batch: {}", sql) - runInTransaction(conn) { - prepare.flatMap { f => - val (params, _) = f(ps, conn) - logger.logBatchItem(sql, params) - ps.executeUpdate() - extractResult(ps.getGeneratedKeys(), conn, extractor) - } + groups.flatMap { case BatchGroupReturning(sql, returningBehavior, prepare) => + val ps = conn.prepareStatement(sql, java.sql.Statement.RETURN_GENERATED_KEYS) + logger.underlying.debug("Batch: {}", sql) + runInTransaction(conn) { + prepare.flatMap { f => + val (params, _) = f(ps, conn) + logger.logBatchItem(sql, params) + ps.executeUpdate() + extractResult(ps.getGeneratedKeys(), conn, extractor) } + } } } } @@ -102,65 +108,74 @@ trait SqlServerExecuteOverride[+N <: NamingStrategy] extends JdbcContextVerbExec private val logger = ContextLogger(classOf[SqlServerExecuteOverride[_]]) - override def executeActionReturningMany[O](sql: String, prepare: Prepare = identityPrepare, extractor: Extractor[O], returningBehavior: ReturnAction)(info: ExecutionInfo, dc: Runner): Result[List[O]] = + override def executeActionReturningMany[O]( + sql: String, + prepare: Prepare = identityPrepare, + extractor: Extractor[O], + returningBehavior: ReturnAction + )(info: ExecutionInfo, dc: Runner): Result[List[O]] = withConnectionWrapped { conn => val (params, ps) = prepare(prepareWithReturning(sql, conn, returningBehavior), conn) logger.logQuery(sql, params) extractResult(ps.executeQuery, conn, extractor) } - override def executeBatchActionReturning[T](groups: List[BatchGroupReturning], extractor: Extractor[T])(info: ExecutionInfo, dc: Runner): Result[List[T]] = + override def executeBatchActionReturning[T]( + groups: List[BatchGroupReturning], + extractor: Extractor[T] + )(info: ExecutionInfo, dc: Runner): Result[List[T]] = withConnectionWrapped { conn => - groups.flatMap { - case BatchGroupReturning(sql, returningBehavior, prepare) => - val ps = conn.prepareStatement(sql, java.sql.Statement.RETURN_GENERATED_KEYS) - logger.underlying.debug("Batch: {}", sql) - val outputs = - prepare.flatMap { f => - val (params, _) = f(ps, conn) - logger.logBatchItem(sql, params) - ps.addBatch() - // The SQL Server drive has no ability to either go getGeneratedKeys or executeQuery - // at the end of a sequence of addBatch calls to get all inserted keys/executed queries - // (whether a `OUTPUT` clause is used in the Query or not). That means that in order - // be able to get any results, we need to use extractResult(ps.executeQuery, ...) - // on every single inserted batch! 
See the following mssql-jdbc issues for more detail: - // https://github.com/microsoft/mssql-jdbc/issues/358 - // https://github.com/Microsoft/mssql-jdbc/issues/245 - // Also note that Slick specifically mentions that returning-keys is generally - // not supported when jdbc-batching is used: - // https://github.com/slick/slick/blob/06ccee3cdc0722adeb8bb0658afb4a0d3524b119/slick/src/main/scala/slick/jdbc/JdbcActionComponent.scala#L654 - // Therefore slick falls back to single-row-insert batching when insertion with getGeneratedKeys is used - // - // However, in ProtoQuill we can do a little better. In this case we take advantage of multi-row inserts - // (via multiple VALUES clauses) each of which is a an element of the `prepares` list. That way, we only - // need to execute `extractResult(ps.executeQuery(),...)` once per every insert-query (where each query - // could potentially have 1000+ insert-rows via 1000 VALUES-clauses). This radically decreases - // the number of calls that need to be made to get back IDs (and other data) of the inserted rows. - extractResult(ps.executeQuery(), conn, extractor) - } - outputs + groups.flatMap { case BatchGroupReturning(sql, returningBehavior, prepare) => + val ps = conn.prepareStatement(sql, java.sql.Statement.RETURN_GENERATED_KEYS) + logger.underlying.debug("Batch: {}", sql) + val outputs = + prepare.flatMap { f => + val (params, _) = f(ps, conn) + logger.logBatchItem(sql, params) + ps.addBatch() + // The SQL Server driver has no ability to either call getGeneratedKeys or executeQuery + // at the end of a sequence of addBatch calls to get all inserted keys/executed queries + // (whether an `OUTPUT` clause is used in the Query or not). That means that in order + // to be able to get any results, we need to use extractResult(ps.executeQuery, ...) + // on every single inserted batch! See the following mssql-jdbc issues for more detail: + // https://github.com/microsoft/mssql-jdbc/issues/358 + // https://github.com/Microsoft/mssql-jdbc/issues/245 + // Also note that Slick specifically mentions that returning-keys is generally + // not supported when jdbc-batching is used: + // https://github.com/slick/slick/blob/06ccee3cdc0722adeb8bb0658afb4a0d3524b119/slick/src/main/scala/slick/jdbc/JdbcActionComponent.scala#L654 + // Therefore Slick falls back to single-row-insert batching when insertion with getGeneratedKeys is used + // + // However, in ProtoQuill we can do a little better. In this case we take advantage of multi-row inserts + // (via multiple VALUES clauses) each of which is an element of the `prepares` list. That way, we only + // need to execute `extractResult(ps.executeQuery(),...)` once per insert-query (where each query + // could potentially have 1000+ insert-rows via 1000 VALUES-clauses). This radically decreases + // the number of calls that need to be made to get back IDs (and other data) of the inserted rows.
+ extractResult(ps.executeQuery(), conn, extractor) + } + outputs } } } -trait SqlServerJdbcTypes[+D <: SQLServerDialect, +N <: NamingStrategy] extends JdbcContextTypes[D, N] - with ObjectGenericTimeEncoders - with ObjectGenericTimeDecoders - with BooleanObjectEncoding - with UUIDStringEncoding { +trait SqlServerJdbcTypes[+D <: SQLServerDialect, +N <: NamingStrategy] + extends JdbcContextTypes[D, N] + with ObjectGenericTimeEncoders + with ObjectGenericTimeDecoders + with BooleanObjectEncoding + with UUIDStringEncoding { val idiom: D } -trait OracleJdbcTypes[+D <: OracleDialect, +N <: NamingStrategy] extends JdbcContextTypes[D, N] - with ObjectGenericTimeEncoders - with ObjectGenericTimeDecoders - with BooleanIntEncoding - with UUIDStringEncoding { +trait OracleJdbcTypes[+D <: OracleDialect, +N <: NamingStrategy] + extends JdbcContextTypes[D, N] + with ObjectGenericTimeEncoders + with ObjectGenericTimeDecoders + with BooleanIntEncoding + with UUIDStringEncoding { // Normally it is Types.TIME but in that case Oracle truncates the milliseconds - protected override def jdbcTypeOfLocalTime = Types.TIMESTAMP + protected override def jdbcTypeOfLocalTime = Types.TIMESTAMP protected override def jdbcTypeOfOffsetTime = Types.TIME val idiom: D diff --git a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/UUIDObjectEncoding.scala b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/UUIDObjectEncoding.scala index 8da53f62d..ca73c63bb 100644 --- a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/UUIDObjectEncoding.scala +++ b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/UUIDObjectEncoding.scala @@ -5,6 +5,8 @@ import java.util.UUID trait UUIDObjectEncoding { this: JdbcContextTypes[_, _] => - implicit val uuidEncoder: Encoder[UUID] = encoder(Types.OTHER, (index, value, row) => row.setObject(index, value, Types.OTHER)) - implicit val uuidDecoder: Decoder[UUID] = decoder((index, row, conn) => UUID.fromString(row.getObject(index).toString)) + implicit val uuidEncoder: Encoder[UUID] = + encoder(Types.OTHER, (index, value, row) => row.setObject(index, value, Types.OTHER)) + implicit val uuidDecoder: Decoder[UUID] = + decoder((index, row, conn) => UUID.fromString(row.getObject(index).toString)) } diff --git a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/UUIDStringEncoding.scala b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/UUIDStringEncoding.scala index f9f34c7a1..f19983e54 100644 --- a/quill-jdbc/src/main/scala/io/getquill/context/jdbc/UUIDStringEncoding.scala +++ b/quill-jdbc/src/main/scala/io/getquill/context/jdbc/UUIDStringEncoding.scala @@ -5,6 +5,7 @@ import java.util.UUID trait UUIDStringEncoding { this: JdbcContextTypes[_, _] => - implicit val uuidEncoder: Encoder[UUID] = encoder(Types.VARCHAR, (index, value, row) => row.setString(index, value.toString)) + implicit val uuidEncoder: Encoder[UUID] = + encoder(Types.VARCHAR, (index, value, row) => row.setString(index, value.toString)) implicit val uuidDecoder: Decoder[UUID] = decoder((index, row, conn) => UUID.fromString(row.getString(index))) } diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/JdbcContextConfigSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/JdbcContextConfigSpec.scala index e29182f1b..f3377b227 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/JdbcContextConfigSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/JdbcContextConfigSpec.scala @@ -10,4 +10,4 @@ // JdbcContextConfig(ConfigFactory.empty()).dataSource // } // } -// } \ No newline at end of file +//
} diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/PrepareJdbcSpecBase.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/PrepareJdbcSpecBase.scala index 5e4327201..f4e93acab 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/PrepareJdbcSpecBase.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/PrepareJdbcSpecBase.scala @@ -1,10 +1,10 @@ package io.getquill.context.jdbc -import java.sql.{ Connection, PreparedStatement, ResultSet } +import java.sql.{Connection, PreparedStatement, ResultSet} import io.getquill.context.sql.ProductSpec import io.getquill.util.Using.Manager import org.scalactic.Equality -import scala.util.{ Success, Failure } +import scala.util.{Success, Failure} import io.getquill.generic.GenericDecoder import io.getquill.generic.DecodingType.Generic @@ -41,7 +41,7 @@ trait PrepareJdbcSpecBase extends ProductSpec { def batchInsert(conn: => Connection)(prep: Connection => List[PreparedStatement]) = { val r = Manager { use => - val c = use(conn) + val c = use(conn) val st = prep(c) appendExecuteSequence(st) } @@ -51,9 +51,11 @@ trait PrepareJdbcSpecBase extends ProductSpec { } } - def extractResults[T](conn: => Connection)(prep: Connection => PreparedStatement)(extractor: (ResultSet, Connection) => T) = { + def extractResults[T]( + conn: => Connection + )(prep: Connection => PreparedStatement)(extractor: (ResultSet, Connection) => T) = { val r = Manager { use => - val c = use(conn) + val c = use(conn) val st = use(prep(c)) val rs = st.executeQuery() ResultSetExtractor(rs, c, extractor) @@ -67,12 +69,11 @@ trait PrepareJdbcSpecBase extends ProductSpec { def extractProducts(conn: => Connection)(prep: Connection => PreparedStatement): List[Product] = extractResults(conn)(prep)(productExtractor) - def appendExecuteSequence(actions: => List[PreparedStatement]) = { + def appendExecuteSequence(actions: => List[PreparedStatement]) = Manager { use => actions.map { stmt => val s = use(stmt) s.execute() } } - } } diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/BatchValuesJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/BatchValuesJdbcSpec.scala index b494814b5..d19033787 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/BatchValuesJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/BatchValuesJdbcSpec.scala @@ -31,5 +31,5 @@ class BatchValuesJdbcSpec extends BatchValuesSpec { testContext.run(op, batchSize) testContext.run(get).toSet mustEqual result.toSet } - -} \ No newline at end of file + +} diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/CaseClassQueryJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/CaseClassQueryJdbcSpec.scala index 42d09e84a..4f5edc974 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/CaseClassQueryJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/CaseClassQueryJdbcSpec.scala @@ -20,40 +20,60 @@ class CaseClassQueryJdbcSpec extends CaseClassQuerySpec { } "Example 1 - Single Case Class Mapping" in { - testContext.run(`Ex 1 CaseClass Record Output`) should contain theSameElementsAs `Ex 1 CaseClass Record Output expected result` + testContext.run( + `Ex 1 CaseClass Record Output` + ) should contain theSameElementsAs `Ex 1 CaseClass Record Output expected result` } "Example 1A - Single Case Class Mapping" in { - testContext.run(`Ex 1A CaseClass Record Output`) should contain theSameElementsAs `Ex 1 CaseClass Record Output expected result` + testContext.run( + 
`Ex 1A CaseClass Record Output` + ) should contain theSameElementsAs `Ex 1 CaseClass Record Output expected result` } "Example 1B - Single Case Class Mapping" in { - testContext.run(`Ex 1B CaseClass Record Output`) should contain theSameElementsAs `Ex 1 CaseClass Record Output expected result` + testContext.run( + `Ex 1B CaseClass Record Output` + ) should contain theSameElementsAs `Ex 1 CaseClass Record Output expected result` } "Example 2 - Single Record Mapped Join" in { - testContext.run(`Ex 2 Single-Record Join`) should contain theSameElementsAs `Ex 2 Single-Record Join expected result` + testContext.run( + `Ex 2 Single-Record Join` + ) should contain theSameElementsAs `Ex 2 Single-Record Join expected result` } "Example 3 - Inline Record as Filter" in { - testContext.run(`Ex 3 Inline Record Usage`) should contain theSameElementsAs `Ex 3 Inline Record Usage exepected result` + testContext.run( + `Ex 3 Inline Record Usage` + ) should contain theSameElementsAs `Ex 3 Inline Record Usage exepected result` } "Example 4 - Ex 4 Mapped Union of Nicknames" in { - testContext.run(`Ex 4 Mapped Union of Nicknames`) should contain theSameElementsAs `Ex 4 Mapped Union of Nicknames expected result` + testContext.run( + `Ex 4 Mapped Union of Nicknames` + ) should contain theSameElementsAs `Ex 4 Mapped Union of Nicknames expected result` } "Example 4 - Ex 4 Mapped Union All of Nicknames" in { - testContext.run(`Ex 4 Mapped Union All of Nicknames`) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames expected result` + testContext.run( + `Ex 4 Mapped Union All of Nicknames` + ) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames expected result` } "Example 4 - Ex 4 Mapped Union All of Nicknames Filtered" in { - testContext.run(`Ex 4 Mapped Union All of Nicknames Filtered`) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames Filtered expected result` + testContext.run( + `Ex 4 Mapped Union All of Nicknames Filtered` + ) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames Filtered expected result` } "Example 4 - Ex 4 Mapped Union All of Nicknames Same Field" in { - testContext.run(`Ex 4 Mapped Union All of Nicknames Same Field`) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames Same Field expected result` + testContext.run( + `Ex 4 Mapped Union All of Nicknames Same Field` + ) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames Same Field expected result` } "Example 4 - Ex 4 Mapped Union All of Nicknames Same Field Filtered" in { - testContext.run(`Ex 4 Mapped Union All of Nicknames Same Field Filtered`) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames Same Field Filtered expected result` + testContext.run( + `Ex 4 Mapped Union All of Nicknames Same Field Filtered` + ) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames Same Field Filtered expected result` } } diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/DistinctJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/DistinctJdbcSpec.scala index b8b3c36fd..6747f8f7a 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/DistinctJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/DistinctJdbcSpec.scala @@ -23,28 +23,42 @@ class DistinctJdbcSpec extends DistinctSpec { testContext.run(`Ex 1 Distinct One Field`) should contain theSameElementsAs `Ex 1 Distinct One Field Result` } "Ex 2 Distinct Two Field Tuple`" in { - testContext.run(`Ex 2 Distinct 
Two Field Tuple`) should contain theSameElementsAs `Ex 2 Distinct Two Field Tuple Result` + testContext.run( + `Ex 2 Distinct Two Field Tuple` + ) should contain theSameElementsAs `Ex 2 Distinct Two Field Tuple Result` } "Ex 2a Distinct Two Field Tuple Same Element`" in { - testContext.run(`Ex 2a Distinct Two Field Tuple Same Element`) should contain theSameElementsAs `Ex 2a Distinct Two Field Tuple Same Element Result` + testContext.run( + `Ex 2a Distinct Two Field Tuple Same Element` + ) should contain theSameElementsAs `Ex 2a Distinct Two Field Tuple Same Element Result` } "Ex 3 Distinct Two Field Case Class`" in { - testContext.run(`Ex 3 Distinct Two Field Case Class`) should contain theSameElementsAs `Ex 3 Distinct Two Field Case Class Result` + testContext.run( + `Ex 3 Distinct Two Field Case Class` + ) should contain theSameElementsAs `Ex 3 Distinct Two Field Case Class Result` } "Ex 4-base non-Distinct Subquery`" in { - testContext.run(`Ex 4-base non-Distinct Subquery`) should contain theSameElementsAs `Ex 4-base non-Distinct Subquery Result` + testContext.run( + `Ex 4-base non-Distinct Subquery` + ) should contain theSameElementsAs `Ex 4-base non-Distinct Subquery Result` } "Ex 4 Distinct Subquery`" in { testContext.run(`Ex 4 Distinct Subquery`) should contain theSameElementsAs `Ex 4 Distinct Subquery Result` } "Ex 5 Distinct Subquery with Map Single Field" in { - testContext.run(`Ex 5 Distinct Subquery with Map Single Field`) should contain theSameElementsAs `Ex 5 Distinct Subquery with Map Single Field Result` + testContext.run( + `Ex 5 Distinct Subquery with Map Single Field` + ) should contain theSameElementsAs `Ex 5 Distinct Subquery with Map Single Field Result` } "Ex 6 Distinct Subquery with Map Multi Field" in { - testContext.run(`Ex 6 Distinct Subquery with Map Multi Field`) should contain theSameElementsAs `Ex 6 Distinct Subquery with Map Multi Field Result` + testContext.run( + `Ex 6 Distinct Subquery with Map Multi Field` + ) should contain theSameElementsAs `Ex 6 Distinct Subquery with Map Multi Field Result` } "Ex 7 Distinct Subquery with Map Multi Field Tuple" in { - testContext.run(`Ex 7 Distinct Subquery with Map Multi Field Tuple`) should contain theSameElementsAs `Ex 7 Distinct Subquery with Map Multi Field Tuple Result` + testContext.run( + `Ex 7 Distinct Subquery with Map Multi Field Tuple` + ) should contain theSameElementsAs `Ex 7 Distinct Subquery with Map Multi Field Tuple Result` } "Ex 8 Distinct With Sort" in { testContext.run(`Ex 8 Distinct With Sort`) mustEqual `Ex 8 Distinct With Sort Result` diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/JdbcEncodingSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/JdbcEncodingSpec.scala index 50be31e0b..1da10c548 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/JdbcEncodingSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/JdbcEncodingSpec.scala @@ -17,7 +17,7 @@ class JdbcEncodingSpec extends EncodingSpec { "Encode/Decode Other Time Types" in { context.run(query[TimeEntity].delete) - val zid = ZoneId.systemDefault() + val zid = ZoneId.systemDefault() val timeEntity = TimeEntity.make(zid) context.run(query[TimeEntity].insertValue(lift(timeEntity))) val actual = context.run(query[TimeEntity]).head diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/OptionJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/OptionJdbcSpec.scala index e58f574d5..9750f3b49 100644 --- 
a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/OptionJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/OptionJdbcSpec.scala @@ -28,7 +28,9 @@ class OptionJdbcSpec extends OptionQuerySpec { } "Example 1.2 - Simple Map with Condition and GetOrElse" in { - testContext.run(`Simple Map with Condition and GetOrElse`) should contain theSameElementsAs `Simple Map with Condition and GetOrElse Result` + testContext.run( + `Simple Map with Condition and GetOrElse` + ) should contain theSameElementsAs `Simple Map with Condition and GetOrElse Result` } "Example 2 - Simple GetOrElse" in { diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/PeopleJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/PeopleJdbcSpec.scala index 328a3cb0d..69f5d18fa 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/PeopleJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/PeopleJdbcSpec.scala @@ -22,7 +22,7 @@ class PeopleJdbcSpec extends PeopleSpec { testContext.run(`Ex 1 differences`) mustEqual `Ex 1 expected result` } - "Example 2 - range simple" in { //hello + "Example 2 - range simple" in { // hello testContext.run(`Ex 2 rangeSimple`(lift(`Ex 2 param 1`), lift(`Ex 2 param 2`))) mustEqual `Ex 2 expected result` } diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/ProductJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/ProductJdbcSpec.scala index 9fba6ee4f..d55ef22b5 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/ProductJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/ProductJdbcSpec.scala @@ -18,16 +18,16 @@ class ProductJdbcSpec extends ProductSpec { /* H2 does not support returning generated keys for batch insert. So we have to insert one entry at a time in order to get the generated values. 
- */ + */ val inserted = productEntries.map(product => testContext.run(productInsert(lift(product)))) val id: Long = inserted(2) - val product = testContext.run(productById(lift(id))).head + val product = testContext.run(productById(lift(id))).head product.description mustEqual productEntries(2).description product.id mustEqual inserted(2) } "Single insert product" in { val inserted = testContext.run(productSingleInsert) - val product = testContext.run(productById(lift(inserted))).head + val product = testContext.run(productById(lift(inserted))).head product.description mustEqual "Window" product.id mustEqual inserted } @@ -48,7 +48,7 @@ class ProductJdbcSpec extends ProductSpec { val q1 = quote { product.insert(_.sku -> lift(prd.sku), _.description -> lift(prd.description)).returningGenerated(_.id) } - val inserted = testContext.run(q1) + val inserted = testContext.run(q1) val returnedProduct = testContext.run(productById(lift(inserted))).head returnedProduct.description mustEqual "test2" returnedProduct.sku mustEqual 2L @@ -56,8 +56,8 @@ class ProductJdbcSpec extends ProductSpec { } "Single product insert with a method quotation" in { - val prd = Product(0L, "test3", 3L) - val inserted = testContext.run(productInsert(lift(prd))) + val prd = Product(0L, "test3", 3L) + val inserted = testContext.run(productInsert(lift(prd))) val returnedProduct = testContext.run(productById(lift(inserted))).head returnedProduct.description mustEqual "test3" returnedProduct.sku mustEqual 3L diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/package.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/package.scala index 87f7c2f85..01159ea5a 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/package.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/h2/package.scala @@ -1,7 +1,7 @@ package io.getquill.context.jdbc import io.getquill._ -import io.getquill.context.sql.{ TestDecoders, TestEncoders } +import io.getquill.context.sql.{TestDecoders, TestEncoders} package object h2 { diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/BatchValuesJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/BatchValuesJdbcSpec.scala index b667b437b..160787685 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/BatchValuesJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/BatchValuesJdbcSpec.scala @@ -32,4 +32,4 @@ class BatchValuesJdbcSpec extends BatchValuesSpec { testContext.run(op, batchSize) testContext.run(get).toSet mustEqual result.toSet } -} \ No newline at end of file +} diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/CaseClassQueryJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/CaseClassQueryJdbcSpec.scala index 69f0b089f..2579c824a 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/CaseClassQueryJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/CaseClassQueryJdbcSpec.scala @@ -20,40 +20,60 @@ class CaseClassQueryJdbcSpec extends CaseClassQuerySpec { } "Example 1 - Single Case Class Mapping" in { - testContext.run(`Ex 1 CaseClass Record Output`) should contain theSameElementsAs `Ex 1 CaseClass Record Output expected result` + testContext.run( + `Ex 1 CaseClass Record Output` + ) should contain theSameElementsAs `Ex 1 CaseClass Record Output expected result` } "Example 1A - Single Case Class Mapping" in { - testContext.run(`Ex 1A CaseClass Record Output`) should contain theSameElementsAs 
`Ex 1 CaseClass Record Output expected result` + testContext.run( + `Ex 1A CaseClass Record Output` + ) should contain theSameElementsAs `Ex 1 CaseClass Record Output expected result` } "Example 1B - Single Case Class Mapping" in { - testContext.run(`Ex 1B CaseClass Record Output`) should contain theSameElementsAs `Ex 1 CaseClass Record Output expected result` + testContext.run( + `Ex 1B CaseClass Record Output` + ) should contain theSameElementsAs `Ex 1 CaseClass Record Output expected result` } "Example 2 - Single Record Mapped Join" in { - testContext.run(`Ex 2 Single-Record Join`) should contain theSameElementsAs `Ex 2 Single-Record Join expected result` + testContext.run( + `Ex 2 Single-Record Join` + ) should contain theSameElementsAs `Ex 2 Single-Record Join expected result` } "Example 3 - Inline Record as Filter" in { - testContext.run(`Ex 3 Inline Record Usage`) should contain theSameElementsAs `Ex 3 Inline Record Usage exepected result` + testContext.run( + `Ex 3 Inline Record Usage` + ) should contain theSameElementsAs `Ex 3 Inline Record Usage exepected result` } "Example 4 - Ex 4 Mapped Union of Nicknames" in { - testContext.run(`Ex 4 Mapped Union of Nicknames`) should contain theSameElementsAs `Ex 4 Mapped Union of Nicknames expected result` + testContext.run( + `Ex 4 Mapped Union of Nicknames` + ) should contain theSameElementsAs `Ex 4 Mapped Union of Nicknames expected result` } "Example 4 - Ex 4 Mapped Union All of Nicknames" in { - testContext.run(`Ex 4 Mapped Union All of Nicknames`) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames expected result` + testContext.run( + `Ex 4 Mapped Union All of Nicknames` + ) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames expected result` } "Example 4 - Ex 4 Mapped Union All of Nicknames Filtered" in { - testContext.run(`Ex 4 Mapped Union All of Nicknames Filtered`) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames Filtered expected result` + testContext.run( + `Ex 4 Mapped Union All of Nicknames Filtered` + ) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames Filtered expected result` } "Example 4 - Ex 4 Mapped Union All of Nicknames Same Field" in { - testContext.run(`Ex 4 Mapped Union All of Nicknames Same Field`) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames Same Field expected result` + testContext.run( + `Ex 4 Mapped Union All of Nicknames Same Field` + ) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames Same Field expected result` } "Example 4 - Ex 4 Mapped Union All of Nicknames Same Field Filtered" in { - testContext.run(`Ex 4 Mapped Union All of Nicknames Same Field Filtered`) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames Same Field Filtered expected result` + testContext.run( + `Ex 4 Mapped Union All of Nicknames Same Field Filtered` + ) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames Same Field Filtered expected result` } } diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/DistinctJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/DistinctJdbcSpec.scala index 3b58ab10a..46c6cdafb 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/DistinctJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/DistinctJdbcSpec.scala @@ -23,28 +23,42 @@ class DistinctJdbcSpec extends DistinctSpec { testContext.run(`Ex 1 Distinct One Field`) should contain theSameElementsAs `Ex 1 Distinct One Field 
Result` } "Ex 2 Distinct Two Field Tuple`" in { - testContext.run(`Ex 2 Distinct Two Field Tuple`) should contain theSameElementsAs `Ex 2 Distinct Two Field Tuple Result` + testContext.run( + `Ex 2 Distinct Two Field Tuple` + ) should contain theSameElementsAs `Ex 2 Distinct Two Field Tuple Result` } "Ex 2a Distinct Two Field Tuple Same Element`" in { - testContext.run(`Ex 2a Distinct Two Field Tuple Same Element`) should contain theSameElementsAs `Ex 2a Distinct Two Field Tuple Same Element Result` + testContext.run( + `Ex 2a Distinct Two Field Tuple Same Element` + ) should contain theSameElementsAs `Ex 2a Distinct Two Field Tuple Same Element Result` } "Ex 3 Distinct Two Field Case Class`" in { - testContext.run(`Ex 3 Distinct Two Field Case Class`) should contain theSameElementsAs `Ex 3 Distinct Two Field Case Class Result` + testContext.run( + `Ex 3 Distinct Two Field Case Class` + ) should contain theSameElementsAs `Ex 3 Distinct Two Field Case Class Result` } "Ex 4-base non-Distinct Subquery`" in { - testContext.run(`Ex 4-base non-Distinct Subquery`) should contain theSameElementsAs `Ex 4-base non-Distinct Subquery Result` + testContext.run( + `Ex 4-base non-Distinct Subquery` + ) should contain theSameElementsAs `Ex 4-base non-Distinct Subquery Result` } "Ex 4 Distinct Subquery`" in { testContext.run(`Ex 4 Distinct Subquery`) should contain theSameElementsAs `Ex 4 Distinct Subquery Result` } "Ex 5 Distinct Subquery with Map Single Field" in { - testContext.run(`Ex 5 Distinct Subquery with Map Single Field`) should contain theSameElementsAs `Ex 5 Distinct Subquery with Map Single Field Result` + testContext.run( + `Ex 5 Distinct Subquery with Map Single Field` + ) should contain theSameElementsAs `Ex 5 Distinct Subquery with Map Single Field Result` } "Ex 6 Distinct Subquery with Map Multi Field" in { - testContext.run(`Ex 6 Distinct Subquery with Map Multi Field`) should contain theSameElementsAs `Ex 6 Distinct Subquery with Map Multi Field Result` + testContext.run( + `Ex 6 Distinct Subquery with Map Multi Field` + ) should contain theSameElementsAs `Ex 6 Distinct Subquery with Map Multi Field Result` } "Ex 7 Distinct Subquery with Map Multi Field Tuple" in { - testContext.run(`Ex 7 Distinct Subquery with Map Multi Field Tuple`) should contain theSameElementsAs `Ex 7 Distinct Subquery with Map Multi Field Tuple Result` + testContext.run( + `Ex 7 Distinct Subquery with Map Multi Field Tuple` + ) should contain theSameElementsAs `Ex 7 Distinct Subquery with Map Multi Field Tuple Result` } "Ex 8 Distinct With Sort" in { testContext.run(`Ex 8 Distinct With Sort`) should contain theSameElementsAs `Ex 8 Distinct With Sort Result` diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/JdbcEncodingSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/JdbcEncodingSpec.scala index 3226df9be..243ffbdad 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/JdbcEncodingSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/JdbcEncodingSpec.scala @@ -19,16 +19,15 @@ class JdbcEncodingSpec extends EncodingSpec { "encodes sets" in { testContext.run(query[EncodingTestEntity].delete) testContext.run(liftQuery(insertValues).foreach(p => query[EncodingTestEntity].insertValue(p))) - inline def q = quote { - (set: Query[Int]) => - query[EncodingTestEntity].filter(t => set.contains(t.v6)) + inline def q = quote { (set: Query[Int]) => + query[EncodingTestEntity].filter(t => set.contains(t.v6)) } 
verify(testContext.run(q(liftQuery(insertValues.map(_.v6).toSet)))) } "Encode/Decode Other Time Types" in { context.run(query[TimeEntity].delete) - val zid = ZoneId.systemDefault() + val zid = ZoneId.systemDefault() val timeEntity = TimeEntity.make(zid) context.run(query[TimeEntity].insertValue(lift(timeEntity))) val actual = context.run(query[TimeEntity]).head diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/OptionJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/OptionJdbcSpec.scala index ec5534982..56052992f 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/OptionJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/OptionJdbcSpec.scala @@ -28,7 +28,9 @@ class OptionJdbcSpec extends OptionQuerySpec { } "Example 1.2 - Simple Map with Condition and GetOrElse" in { - testContext.run(`Simple Map with Condition and GetOrElse`) should contain theSameElementsAs `Simple Map with Condition and GetOrElse Result` + testContext.run( + `Simple Map with Condition and GetOrElse` + ) should contain theSameElementsAs `Simple Map with Condition and GetOrElse Result` } "Example 2 - Simple GetOrElse" in { diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/ProductJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/ProductJdbcSpec.scala index 901d29b7a..0317befb8 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/ProductJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/ProductJdbcSpec.scala @@ -16,13 +16,13 @@ class ProductJdbcSpec extends ProductSpec { "Product" - { "Insert multiple products" in { val inserted = testContext.run(liftQuery(productEntries).foreach(p => productInsert(p))) - val product = testContext.run(productById(lift(inserted(2)))).head + val product = testContext.run(productById(lift(inserted(2)))).head product.description mustEqual productEntries(2).description product.id mustEqual inserted(2) } "Single insert product" in { val inserted = testContext.run(productSingleInsert) - val product = testContext.run(productById(lift(inserted))).head + val product = testContext.run(productById(lift(inserted))).head product.description mustEqual "Window" product.id mustEqual inserted } @@ -45,7 +45,7 @@ class ProductJdbcSpec extends ProductSpec { inline def q1 = quote { product.insert(_.sku -> lift(prd.sku), _.description -> lift(prd.description)).returningGenerated(_.id) } - val inserted = testContext.run(q1) + val inserted = testContext.run(q1) val returnedProduct = testContext.run(productById(lift(inserted))).head returnedProduct.description mustEqual "test2" returnedProduct.sku mustEqual 2L @@ -53,8 +53,8 @@ class ProductJdbcSpec extends ProductSpec { } "Single product insert with a method quotation" in { - val prd = Product(0L, "test3", 3L) - val inserted = testContext.run(productInsert(lift(prd))) + val prd = Product(0L, "test3", 3L) + val inserted = testContext.run(productInsert(lift(prd))) val returnedProduct = testContext.run(productById(lift(inserted))).head returnedProduct.description mustEqual "test3" returnedProduct.sku mustEqual 3L diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/QueryResultTypeJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/QueryResultTypeJdbcSpec.scala index 9f997717f..8e867341b 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/QueryResultTypeJdbcSpec.scala +++ 
b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/QueryResultTypeJdbcSpec.scala @@ -18,8 +18,8 @@ class QueryResultTypeJdbcSpec extends QueryResultTypeSpec { override def beforeAll() = { context.run(deleteAll) val ids = context.run(liftQuery(productEntries).foreach(p => productInsert(p))) - val inserted = (ids zip productEntries).map { - case (id, prod) => prod.copy(id = id) + val inserted = (ids zip productEntries).map { case (id, prod) => + prod.copy(id = id) } insertedProducts.addAll(inserted.asJava) () diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/package.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/package.scala index 137200cbf..a94a4b732 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/package.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/mysql/package.scala @@ -1,10 +1,14 @@ package io.getquill.context.jdbc import io.getquill._ -import io.getquill.context.sql.{ TestDecoders, TestEncoders } +import io.getquill.context.sql.{TestDecoders, TestEncoders} package object mysql { - object testContext extends MysqlJdbcContext(Literal, "testMysqlDB") with TestEntities with TestEncoders with TestDecoders + object testContext + extends MysqlJdbcContext(Literal, "testMysqlDB") + with TestEntities + with TestEncoders + with TestDecoders } diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/BatchValuesJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/BatchValuesJdbcSpec.scala index e2859f53f..3073e0b9e 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/BatchValuesJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/BatchValuesJdbcSpec.scala @@ -10,7 +10,9 @@ class BatchValuesJdbcSpec extends BatchValuesSpec { override def beforeEach(): Unit = { testContext.run(query[Product].delete) - testContext.run(sql"ALTER TABLE Product MODIFY (id GENERATED BY DEFAULT ON NULL AS IDENTITY (START WITH 1))".as[Delete[Product]]) + testContext.run( + sql"ALTER TABLE Product MODIFY (id GENERATED BY DEFAULT ON NULL AS IDENTITY (START WITH 1))".as[Delete[Product]] + ) super.beforeEach() } @@ -35,4 +37,4 @@ class BatchValuesJdbcSpec extends BatchValuesSpec { testContext.run(op, batchSize) testContext.run(get).toSet mustEqual result.toSet } -} \ No newline at end of file +} diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/CaseClassQueryJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/CaseClassQueryJdbcSpec.scala index 1284245f5..c6664ce02 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/CaseClassQueryJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/CaseClassQueryJdbcSpec.scala @@ -22,41 +22,61 @@ class CaseClassQueryJdbcSpec extends CaseClassQuerySpec { } "Example 1 - Single Case Class Mapping" in { - testContext.run(`Ex 1 CaseClass Record Output`) should contain theSameElementsAs `Ex 1 CaseClass Record Output expected result` + testContext.run( + `Ex 1 CaseClass Record Output` + ) should contain theSameElementsAs `Ex 1 CaseClass Record Output expected result` } "Example 1A - Single Case Class Mapping" in { - testContext.run(`Ex 1A CaseClass Record Output`) should contain theSameElementsAs `Ex 1 CaseClass Record Output expected result` + testContext.run( + `Ex 1A CaseClass Record Output` + ) should contain theSameElementsAs `Ex 1 CaseClass Record Output expected result` } "Example 1B - Single Case Class Mapping" in { - testContext.run(`Ex 1B 
CaseClass Record Output`) should contain theSameElementsAs `Ex 1 CaseClass Record Output expected result` + testContext.run( + `Ex 1B CaseClass Record Output` + ) should contain theSameElementsAs `Ex 1 CaseClass Record Output expected result` } "Example 2 - Single Record Mapped Join" in { - testContext.run(`Ex 2 Single-Record Join`) should contain theSameElementsAs `Ex 2 Single-Record Join expected result` + testContext.run( + `Ex 2 Single-Record Join` + ) should contain theSameElementsAs `Ex 2 Single-Record Join expected result` } "Example 3 - Inline Record as Filter" in { - testContext.run(`Ex 3 Inline Record Usage`) should contain theSameElementsAs `Ex 3 Inline Record Usage exepected result` + testContext.run( + `Ex 3 Inline Record Usage` + ) should contain theSameElementsAs `Ex 3 Inline Record Usage exepected result` } "Example 4 - Ex 4 Mapped Union of Nicknames" in { println(testContext.translate(`Ex 4 Mapped Union of Nicknames`)) - testContext.run(`Ex 4 Mapped Union of Nicknames`) should contain theSameElementsAs `Ex 4 Mapped Union of Nicknames expected result` + testContext.run( + `Ex 4 Mapped Union of Nicknames` + ) should contain theSameElementsAs `Ex 4 Mapped Union of Nicknames expected result` } "Example 4 - Ex 4 Mapped Union All of Nicknames" in { - testContext.run(`Ex 4 Mapped Union All of Nicknames`) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames expected result` + testContext.run( + `Ex 4 Mapped Union All of Nicknames` + ) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames expected result` } "Example 4 - Ex 4 Mapped Union All of Nicknames Filtered" in { - testContext.run(`Ex 4 Mapped Union All of Nicknames Filtered`) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames Filtered expected result` + testContext.run( + `Ex 4 Mapped Union All of Nicknames Filtered` + ) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames Filtered expected result` } "Example 4 - Ex 4 Mapped Union All of Nicknames Same Field" in { - testContext.run(`Ex 4 Mapped Union All of Nicknames Same Field`) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames Same Field expected result` + testContext.run( + `Ex 4 Mapped Union All of Nicknames Same Field` + ) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames Same Field expected result` } "Example 4 - Ex 4 Mapped Union All of Nicknames Same Field Filtered" in { - testContext.run(`Ex 4 Mapped Union All of Nicknames Same Field Filtered`) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames Same Field Filtered expected result` + testContext.run( + `Ex 4 Mapped Union All of Nicknames Same Field Filtered` + ) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames Same Field Filtered expected result` } } diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/DepartmentsJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/DepartmentsJdbcSpec.scala index 40ac9aa6c..7e68e65ad 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/DepartmentsJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/DepartmentsJdbcSpec.scala @@ -25,10 +25,14 @@ class DepartmentsJdbcSpec extends DepartmentsSpec { } "Example 8 - nested naive" in { - testContext.run(`Example 8 expertise naive`(lift(`Example 8 param`))) should contain theSameElementsAs `Example 8 expected result` + testContext.run( + `Example 8 expertise naive`(lift(`Example 8 param`)) + ) should contain 
theSameElementsAs `Example 8 expected result` } "Example 9 - nested db" in { - testContext.run(`Example 9 expertise`(lift(`Example 9 param`))) should contain theSameElementsAs `Example 9 expected result` + testContext.run( + `Example 9 expertise`(lift(`Example 9 param`)) + ) should contain theSameElementsAs `Example 9 expected result` } } diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/DistinctJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/DistinctJdbcSpec.scala index 67f37dabf..d0d84a71b 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/DistinctJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/DistinctJdbcSpec.scala @@ -23,28 +23,42 @@ class DistinctJdbcSpec extends DistinctSpec { testContext.run(`Ex 1 Distinct One Field`) should contain theSameElementsAs `Ex 1 Distinct One Field Result` } "Ex 2 Distinct Two Field Tuple`" in { - testContext.run(`Ex 2 Distinct Two Field Tuple`) should contain theSameElementsAs `Ex 2 Distinct Two Field Tuple Result` + testContext.run( + `Ex 2 Distinct Two Field Tuple` + ) should contain theSameElementsAs `Ex 2 Distinct Two Field Tuple Result` } "Ex 2a Distinct Two Field Tuple Same Element`" in { - testContext.run(`Ex 2a Distinct Two Field Tuple Same Element`) should contain theSameElementsAs `Ex 2a Distinct Two Field Tuple Same Element Result` + testContext.run( + `Ex 2a Distinct Two Field Tuple Same Element` + ) should contain theSameElementsAs `Ex 2a Distinct Two Field Tuple Same Element Result` } "Ex 3 Distinct Two Field Case Class`" in { - testContext.run(`Ex 3 Distinct Two Field Case Class`) should contain theSameElementsAs `Ex 3 Distinct Two Field Case Class Result` + testContext.run( + `Ex 3 Distinct Two Field Case Class` + ) should contain theSameElementsAs `Ex 3 Distinct Two Field Case Class Result` } "Ex 4-base non-Distinct Subquery`" in { - testContext.run(`Ex 4-base non-Distinct Subquery`) should contain theSameElementsAs `Ex 4-base non-Distinct Subquery Result` + testContext.run( + `Ex 4-base non-Distinct Subquery` + ) should contain theSameElementsAs `Ex 4-base non-Distinct Subquery Result` } "Ex 4 Distinct Subquery`" in { testContext.run(`Ex 4 Distinct Subquery`) should contain theSameElementsAs `Ex 4 Distinct Subquery Result` } "Ex 5 Distinct Subquery with Map Single Field" in { - testContext.run(`Ex 5 Distinct Subquery with Map Single Field`) should contain theSameElementsAs `Ex 5 Distinct Subquery with Map Single Field Result` + testContext.run( + `Ex 5 Distinct Subquery with Map Single Field` + ) should contain theSameElementsAs `Ex 5 Distinct Subquery with Map Single Field Result` } "Ex 6 Distinct Subquery with Map Multi Field" in { - testContext.run(`Ex 6 Distinct Subquery with Map Multi Field`) should contain theSameElementsAs `Ex 6 Distinct Subquery with Map Multi Field Result` + testContext.run( + `Ex 6 Distinct Subquery with Map Multi Field` + ) should contain theSameElementsAs `Ex 6 Distinct Subquery with Map Multi Field Result` } "Ex 7 Distinct Subquery with Map Multi Field Tuple" in { - testContext.run(`Ex 7 Distinct Subquery with Map Multi Field Tuple`) should contain theSameElementsAs `Ex 7 Distinct Subquery with Map Multi Field Tuple Result` + testContext.run( + `Ex 7 Distinct Subquery with Map Multi Field Tuple` + ) should contain theSameElementsAs `Ex 7 Distinct Subquery with Map Multi Field Tuple Result` } "Ex 8 Distinct With Sort" in { testContext.run(`Ex 8 Distinct With Sort`) mustEqual `Ex 8 Distinct With Sort Result` 
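As an aside on the SqlServerExecuteOverride hunk above: the multi-row VALUES strategy its comment describes is easier to see in a standalone sketch. The following plain-JDBC snippet is illustrative only; the Product(description) table, the insertManyReturningIds name, and the exact SQL shape are assumptions for the example, not part of this patch or of ProtoQuill's API. The point is that one prepared statement carries a VALUES clause per row, so a single executeQuery call returns every generated id rather than one round-trip per row.

// Hypothetical sketch, not ProtoQuill code: batch-insert-returning against SQL Server
// using one multi-row VALUES statement per batch group.
import java.sql.Connection

def insertManyReturningIds(conn: Connection, descriptions: List[String]): List[Long] = {
  // One "(?)" clause per row; OUTPUT INSERTED.id yields a result-set row per inserted record.
  val values = List.fill(descriptions.size)("(?)").mkString(", ")
  val ps     = conn.prepareStatement(s"INSERT INTO Product (description) OUTPUT INSERTED.id VALUES $values")
  descriptions.zipWithIndex.foreach { case (d, i) => ps.setString(i + 1, d) }
  val rs  = ps.executeQuery() // a single round-trip for the whole group
  var ids = List.empty[Long]
  while (rs.next()) ids = rs.getLong(1) :: ids
  ps.close()
  ids.reverse
}

Contrast this with the SqliteExecuteOverride earlier in the patch, which has to fall back to one executeUpdate per row inside a transaction.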
diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/JdbcEncodingSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/JdbcEncodingSpec.scala index 20ba51cfe..0fd571335 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/JdbcEncodingSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/JdbcEncodingSpec.scala @@ -1,6 +1,6 @@ package io.getquill.context.jdbc.oracle -import io.getquill.context.sql.{ EncodingSpec, EncodingTestType } +import io.getquill.context.sql.{EncodingSpec, EncodingTestType} import io.getquill.Query import io.getquill._ import java.time.ZoneId @@ -19,16 +19,15 @@ class JdbcEncodingSpec extends EncodingSpec { "encodes sets" in { testContext.run(query[EncodingTestEntity].delete) testContext.run(liftQuery(insertValues).foreach(p => query[EncodingTestEntity].insertValue(p))) - inline def q = quote { - (set: Query[Int]) => - query[EncodingTestEntity].filter(t => set.contains(t.v6)) + inline def q = quote { (set: Query[Int]) => + query[EncodingTestEntity].filter(t => set.contains(t.v6)) } verify(testContext.run(q(liftQuery(insertValues.map(_.v6).toSet)))) } "Encode/Decode Other Time Types" in { context.run(query[TimeEntity].delete) - val zid = ZoneId.systemDefault() + val zid = ZoneId.systemDefault() val timeEntity = TimeEntity.make(zid) context.run(query[TimeEntity].insertValue(lift(timeEntity))) val actual = context.run(query[TimeEntity]).head @@ -36,46 +35,46 @@ } def emptyAsNull(e: EncodingTestType) = e.copy(value = if (e.value == "") null else e.value) - def emptyAsNull(str: String) = if (str == "") null else str - def emptyAsNull(bytes: Array[Byte]) = if (bytes == null || bytes.isEmpty) null else bytes + def emptyAsNull(str: String) = if (str == "") null else str + def emptyAsNull(bytes: Array[Byte]) = if (bytes == null || bytes.isEmpty) null else bytes /** - * Since oracle encodes "" as null, need to modify verification in order to track this + * Since Oracle encodes "" as null, we need to modify the verification in + * order to track this */ override def verify(result: List[EncodingTestEntity]) = { result.size mustEqual insertValues.size - result.zip(insertValues).foreach { - case (e1, e2) => - emptyAsNull(e2.v1) mustEqual emptyAsNull(e2.v1) - e1.v2 mustEqual e2.v2 - e1.v3 mustEqual e2.v3 - e1.v4 mustEqual e2.v4 - e1.v5 mustEqual e2.v5 - e1.v6 mustEqual e2.v6 - e1.v7 mustEqual e2.v7 - e1.v8 mustEqual e2.v8 - e1.v9 mustEqual e2.v9 - emptyAsNull(e1.v10) mustEqual emptyAsNull(e2.v10) - e1.v11 mustEqual e2.v11 - emptyAsNull(e1.v12) mustEqual emptyAsNull(e2.v12) - e1.v13 mustEqual e2.v13 - e1.v14 mustEqual e2.v14 + result.zip(insertValues).foreach { case (e1, e2) => + emptyAsNull(e1.v1) mustEqual emptyAsNull(e2.v1) + e1.v2 mustEqual e2.v2 + e1.v3 mustEqual e2.v3 + e1.v4 mustEqual e2.v4 + e1.v5 mustEqual e2.v5 + e1.v6 mustEqual e2.v6 + e1.v7 mustEqual e2.v7 + e1.v8 mustEqual e2.v8 + e1.v9 mustEqual e2.v9 + emptyAsNull(e1.v10) mustEqual emptyAsNull(e2.v10) + e1.v11 mustEqual e2.v11 + emptyAsNull(e1.v12) mustEqual emptyAsNull(e2.v12) + e1.v13 mustEqual e2.v13 + e1.v14 mustEqual e2.v14 - e1.o1 mustEqual e2.o1 - e1.o2 mustEqual e2.o2 - e1.o3 mustEqual e2.o3 - e1.o4 mustEqual e2.o4 - e1.o5 mustEqual e2.o5 - e1.o6 mustEqual e2.o6 - e1.o7 mustEqual e2.o7 - e1.o8 mustEqual e2.o8 - e1.o9 mustEqual e2.o9 - e1.o10.getOrElse(Array[Byte]()) mustEqual e2.o10.getOrElse(Array[Byte]()) - e1.o11 mustEqual e2.o11 - e1.o12 mustEqual e2.o12 - e1.o13 mustEqual e2.o13 - e1.o14 mustEqual
e2.o14 - e1.o15 mustEqual e2.o15 + e1.o1 mustEqual e2.o1 + e1.o2 mustEqual e2.o2 + e1.o3 mustEqual e2.o3 + e1.o4 mustEqual e2.o4 + e1.o5 mustEqual e2.o5 + e1.o6 mustEqual e2.o6 + e1.o7 mustEqual e2.o7 + e1.o8 mustEqual e2.o8 + e1.o9 mustEqual e2.o9 + e1.o10.getOrElse(Array[Byte]()) mustEqual e2.o10.getOrElse(Array[Byte]()) + e1.o11 mustEqual e2.o11 + e1.o12 mustEqual e2.o12 + e1.o13 mustEqual e2.o13 + e1.o14 mustEqual e2.o14 + e1.o15 mustEqual e2.o15 } } } diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/ProductJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/ProductJdbcSpec.scala index 28bbeb8ec..66970cc7d 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/ProductJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/ProductJdbcSpec.scala @@ -16,13 +16,13 @@ class ProductJdbcSpec extends ProductSpec { "Product" - { "Insert multiple products" in { val inserted = testContext.run(liftQuery(productEntries).foreach(p => productInsert(p))) - val product = testContext.run(productById(lift(inserted(2)))).head + val product = testContext.run(productById(lift(inserted(2)))).head product.description mustEqual productEntries(2).description product.id mustEqual inserted(2) } "Single insert product" in { val inserted = testContext.run(productSingleInsert) - val product = testContext.run(productById(lift(inserted))).head + val product = testContext.run(productById(lift(inserted))).head product.description mustEqual "Window" product.id mustEqual inserted } @@ -42,7 +42,7 @@ class ProductJdbcSpec extends ProductSpec { val q1 = quote { product.insert(_.sku -> lift(prd.sku), _.description -> lift(prd.description)).returning(_.id) } - val inserted = testContext.run(q1) + val inserted = testContext.run(q1) val returnedProduct = testContext.run(productById(lift(inserted))).head returnedProduct.description mustEqual "test2" returnedProduct.sku mustEqual 2L @@ -50,8 +50,8 @@ class ProductJdbcSpec extends ProductSpec { } "Single product insert with a method quotation" in { - val prd = Product(0L, "test3", 3L) - val inserted = testContext.run(productInsert(lift(prd))) + val prd = Product(0L, "test3", 3L) + val inserted = testContext.run(productInsert(lift(prd))) val returnedProduct = testContext.run(productById(lift(inserted))).head returnedProduct.description mustEqual "test3" returnedProduct.sku mustEqual 3L diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/QueryResultTypeJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/QueryResultTypeJdbcSpec.scala index e0e054122..8e7291fe9 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/QueryResultTypeJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/QueryResultTypeJdbcSpec.scala @@ -1,7 +1,7 @@ package io.getquill.context.jdbc.oracle import java.util.concurrent.ConcurrentLinkedQueue -import io.getquill.context.sql.{ testContext => _, _ } +import io.getquill.context.sql.{testContext => _, _} import scala.jdk.CollectionConverters._ import io.getquill._ @@ -18,8 +18,8 @@ class QueryResultTypeJdbcSpec extends QueryResultTypeSpec { override def beforeAll() = { context.run(deleteAll) val ids = context.run(liftQuery(productEntries).foreach(p => productInsert(p))) - val inserted = (ids zip productEntries).map { - case (id, prod) => prod.copy(id = id) + val inserted = (ids zip productEntries).map { case (id, prod) => + prod.copy(id = id) } insertedProducts.addAll(inserted.asJava) () diff --git 
a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/ScalarValueSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/ScalarValueSpec.scala index 1171a375d..89556dd97 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/ScalarValueSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/ScalarValueSpec.scala @@ -19,4 +19,4 @@ // "Multi Scalar Select with Infix" in { // context.run("foo" + sql"""'bar'""".as[String]) mustEqual "foobar" // } -// } \ No newline at end of file +// } diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/oracle.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/oracle.scala index 14147a1e9..fd0591051 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/oracle.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/oracle/oracle.scala @@ -1,10 +1,14 @@ package io.getquill.context.jdbc import io.getquill._ -import io.getquill.context.sql.{ TestDecoders, TestEncoders } +import io.getquill.context.sql.{TestDecoders, TestEncoders} package object oracle { - object testContext extends OracleJdbcContext(Literal, "testOracleDB") with TestEntities with TestEncoders with TestDecoders + object testContext + extends OracleJdbcContext(Literal, "testOracleDB") + with TestEntities + with TestEncoders + with TestDecoders } diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/ArrayJdbcEncodingSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/ArrayJdbcEncodingSpec.scala index d56dccb22..51d710e4f 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/ArrayJdbcEncodingSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/ArrayJdbcEncodingSpec.scala @@ -11,7 +11,7 @@ class ArrayJdbcEncodingSpec extends ArrayEncodingBaseSpec { val ctx = testContext import ctx._ - inline def q = quote(query[ArraysTestEntity]) + inline def q = quote(query[ArraysTestEntity]) val corrected = e.copy(timestamps = e.timestamps.map(d => new Timestamp(d.getTime))) "Support all sql base types and `Seq` implementers" in { // diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/BatchValuesJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/BatchValuesJdbcSpec.scala index 815928366..bb007fe5b 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/BatchValuesJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/BatchValuesJdbcSpec.scala @@ -31,4 +31,4 @@ class BatchValuesJdbcSpec extends BatchValuesSpec { testContext.run(op, batchSize) testContext.run(get) mustEqual result } -} \ No newline at end of file +} diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/CaseClassQueryJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/CaseClassQueryJdbcSpec.scala index 4567d4eff..779f2116e 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/CaseClassQueryJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/CaseClassQueryJdbcSpec.scala @@ -21,40 +21,60 @@ class CaseClassQueryJdbcSpec extends CaseClassQuerySpec { } "Example 1 - Single Case Class Mapping" in { - testContext.run(`Ex 1 CaseClass Record Output`) should contain theSameElementsAs `Ex 1 CaseClass Record Output expected result` + testContext.run( + `Ex 1 CaseClass Record Output` + ) should contain theSameElementsAs `Ex 1 CaseClass Record Output expected result` } "Example 1A - Single Case 
Class Mapping" in { - testContext.run(`Ex 1A CaseClass Record Output`) should contain theSameElementsAs `Ex 1 CaseClass Record Output expected result` + testContext.run( + `Ex 1A CaseClass Record Output` + ) should contain theSameElementsAs `Ex 1 CaseClass Record Output expected result` } "Example 1B - Single Case Class Mapping" in { - testContext.run(`Ex 1B CaseClass Record Output`) should contain theSameElementsAs `Ex 1 CaseClass Record Output expected result` + testContext.run( + `Ex 1B CaseClass Record Output` + ) should contain theSameElementsAs `Ex 1 CaseClass Record Output expected result` } "Example 2 - Single Record Mapped Join" in { - testContext.run(`Ex 2 Single-Record Join`) should contain theSameElementsAs `Ex 2 Single-Record Join expected result` + testContext.run( + `Ex 2 Single-Record Join` + ) should contain theSameElementsAs `Ex 2 Single-Record Join expected result` } "Example 3 - Inline Record as Filter" in { - testContext.run(`Ex 3 Inline Record Usage`) should contain theSameElementsAs `Ex 3 Inline Record Usage exepected result` + testContext.run( + `Ex 3 Inline Record Usage` + ) should contain theSameElementsAs `Ex 3 Inline Record Usage exepected result` } "Example 4 - Ex 4 Mapped Union of Nicknames" in { - testContext.run(`Ex 4 Mapped Union of Nicknames`) should contain theSameElementsAs `Ex 4 Mapped Union of Nicknames expected result` + testContext.run( + `Ex 4 Mapped Union of Nicknames` + ) should contain theSameElementsAs `Ex 4 Mapped Union of Nicknames expected result` } "Example 4 - Ex 4 Mapped Union All of Nicknames" in { - testContext.run(`Ex 4 Mapped Union All of Nicknames`) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames expected result` + testContext.run( + `Ex 4 Mapped Union All of Nicknames` + ) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames expected result` } "Example 4 - Ex 4 Mapped Union All of Nicknames Filtered" in { - testContext.run(`Ex 4 Mapped Union All of Nicknames Filtered`) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames Filtered expected result` + testContext.run( + `Ex 4 Mapped Union All of Nicknames Filtered` + ) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames Filtered expected result` } "Example 4 - Ex 4 Mapped Union All of Nicknames Same Field" in { - testContext.run(`Ex 4 Mapped Union All of Nicknames Same Field`) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames Same Field expected result` + testContext.run( + `Ex 4 Mapped Union All of Nicknames Same Field` + ) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames Same Field expected result` } "Example 4 - Ex 4 Mapped Union All of Nicknames Same Field Filtered" in { - testContext.run(`Ex 4 Mapped Union All of Nicknames Same Field Filtered`) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames Same Field Filtered expected result` + testContext.run( + `Ex 4 Mapped Union All of Nicknames Same Field Filtered` + ) should contain theSameElementsAs `Ex 4 Mapped Union All of Nicknames Same Field Filtered expected result` } } diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/DistinctJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/DistinctJdbcSpec.scala index 3b2006969..dd3fac1c7 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/DistinctJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/DistinctJdbcSpec.scala @@ -23,28 +23,42 @@ class DistinctJdbcSpec extends 
DistinctSpec { testContext.run(`Ex 1 Distinct One Field`) should contain theSameElementsAs `Ex 1 Distinct One Field Result` } "Ex 2 Distinct Two Field Tuple`" in { - testContext.run(`Ex 2 Distinct Two Field Tuple`) should contain theSameElementsAs `Ex 2 Distinct Two Field Tuple Result` + testContext.run( + `Ex 2 Distinct Two Field Tuple` + ) should contain theSameElementsAs `Ex 2 Distinct Two Field Tuple Result` } "Ex 2a Distinct Two Field Tuple Same Element`" in { - testContext.run(`Ex 2a Distinct Two Field Tuple Same Element`) should contain theSameElementsAs `Ex 2a Distinct Two Field Tuple Same Element Result` + testContext.run( + `Ex 2a Distinct Two Field Tuple Same Element` + ) should contain theSameElementsAs `Ex 2a Distinct Two Field Tuple Same Element Result` } "Ex 3 Distinct Two Field Case Class`" in { - testContext.run(`Ex 3 Distinct Two Field Case Class`) should contain theSameElementsAs `Ex 3 Distinct Two Field Case Class Result` + testContext.run( + `Ex 3 Distinct Two Field Case Class` + ) should contain theSameElementsAs `Ex 3 Distinct Two Field Case Class Result` } "Ex 4-base non-Distinct Subquery`" in { - testContext.run(`Ex 4-base non-Distinct Subquery`) should contain theSameElementsAs `Ex 4-base non-Distinct Subquery Result` + testContext.run( + `Ex 4-base non-Distinct Subquery` + ) should contain theSameElementsAs `Ex 4-base non-Distinct Subquery Result` } "Ex 4 Distinct Subquery`" in { testContext.run(`Ex 4 Distinct Subquery`) should contain theSameElementsAs `Ex 4 Distinct Subquery Result` } "Ex 5 Distinct Subquery with Map Single Field" in { - testContext.run(`Ex 5 Distinct Subquery with Map Single Field`) should contain theSameElementsAs `Ex 5 Distinct Subquery with Map Single Field Result` + testContext.run( + `Ex 5 Distinct Subquery with Map Single Field` + ) should contain theSameElementsAs `Ex 5 Distinct Subquery with Map Single Field Result` } "Ex 6 Distinct Subquery with Map Multi Field" in { - testContext.run(`Ex 6 Distinct Subquery with Map Multi Field`) should contain theSameElementsAs `Ex 6 Distinct Subquery with Map Multi Field Result` + testContext.run( + `Ex 6 Distinct Subquery with Map Multi Field` + ) should contain theSameElementsAs `Ex 6 Distinct Subquery with Map Multi Field Result` } "Ex 7 Distinct Subquery with Map Multi Field Tuple" in { - testContext.run(`Ex 7 Distinct Subquery with Map Multi Field Tuple`) should contain theSameElementsAs `Ex 7 Distinct Subquery with Map Multi Field Tuple Result` + testContext.run( + `Ex 7 Distinct Subquery with Map Multi Field Tuple` + ) should contain theSameElementsAs `Ex 7 Distinct Subquery with Map Multi Field Tuple Result` } "Ex 8 Distinct With Sort" in { testContext.run(`Ex 8 Distinct With Sort`) mustEqual `Ex 8 Distinct With Sort Result` diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/FlicerCombinationSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/FlicerCombinationSpec.scala index 373e99d8d..7bec76f16 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/FlicerCombinationSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/FlicerCombinationSpec.scala @@ -16,32 +16,41 @@ class FlicerCombinationSpec extends Spec with Inside { ctx.run(sql"TRUNCATE TABLE AddressT, PersonT RESTART IDENTITY".as[Delete[PersonT]]) // Using sequence generation in the DB to create a table with a large amount of content fast. Otherwise // the test has to wait for 1000000 individual inserts which is very slow. 
- ctx.run(sql"insert into PersonT (first, last, age) select i, i, i from generate_series(1, 100000) as t(i);".as[Insert[PersonT]]) - ctx.run(sql"insert into AddressT (ownerId, street) select i, i from generate_series(1, 100000) as t(i);".as[Insert[PersonT]]) + ctx.run( + sql"insert into PersonT (first, last, age) select i, i, i from generate_series(1, 100000) as t(i);" + .as[Insert[PersonT]] + ) + ctx.run( + sql"insert into AddressT (ownerId, street) select i, i from generate_series(1, 100000) as t(i);" + .as[Insert[PersonT]] + ) } - override def afterAll(): Unit = { + override def afterAll(): Unit = // Want to truncate instead of delete so that plan cost will be consistent ctx.run(sql"TRUNCATE TABLE AddressT, PersonT RESTART IDENTITY".as[Delete[PersonT]]) - } "Selection should be correct when" - { inline def q(inline columns: List[String], inline keys: Map[String, String]) = quote { - query[PersonT].leftJoin(query[AddressT]).on((p, a) => p.id == a.ownerId) + query[PersonT] + .leftJoin(query[AddressT]) + .on((p, a) => p.id == a.ownerId) .map((p, a) => PersonAddress(p.first, p.last, a.map(_.street))) .filterByKeys(keys) .filterColumns(columns) .take(10) } "Keys used for filteration are included in the selection output - and they are out of order" in { - ctx.run(q(List("last", "first", "street"), Map("first" -> "1"))) mustEqual List(PersonAddress("1","1",Some("1"))) + ctx.run(q(List("last", "first", "street"), Map("first" -> "1"))) mustEqual List( + PersonAddress("1", "1", Some("1")) + ) } "Keys used for filteration are NOT included in the selection output" in { - ctx.run(q(List("last", "street"), Map("first" -> "1"))) mustEqual List(PersonAddress(null,"1",Some("1"))) + ctx.run(q(List("last", "street"), Map("first" -> "1"))) mustEqual List(PersonAddress(null, "1", Some("1"))) } "Keys used for filteration are NOT included in the selection output and only one table is selected" in { - ctx.run(q(List("last"), Map("first" -> "1"))) mustEqual List(PersonAddress(null,"1",None)) + ctx.run(q(List("last"), Map("first" -> "1"))) mustEqual List(PersonAddress(null, "1", None)) } } -} \ No newline at end of file +} diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/FlicerMapTypesSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/FlicerMapTypesSpec.scala index c522442cd..ec1920cac 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/FlicerMapTypesSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/FlicerMapTypesSpec.scala @@ -27,7 +27,7 @@ class FlicerMapTypesSpec extends Spec with Inside { ) case class DateEncodingTestEntity(v1: LocalDate, v2: LocalDateTime) def makeEntity(i: Int) = { - val ld = LocalDate.of(2022, i, i) + val ld = LocalDate.of(2022, i, i) val ldt = LocalDateTime.of(2022, i, i, i, i, i) DateEncodingTestEntity(ld, ldt) } @@ -38,7 +38,7 @@ class FlicerMapTypesSpec extends Spec with Inside { ) override def beforeAll(): Unit = { - val people = quote { query[Contact] } + val people = quote(query[Contact]) ctx.run(people.delete) ctx.run(liftQuery(contacts).foreach(c => people.insertValue(c))) ctx.run(query[DateEncodingTestEntity].delete) @@ -48,17 +48,23 @@ class FlicerMapTypesSpec extends Spec with Inside { "Should do correct query from map with" - { "simple datatypes" in { ctx.run(query[Contact].filterByKeys(Map("firstName" -> "Joe", "addressFk" -> 1))) mustEqual - List(Contact("Joe","Bloggs",123,1,"1"), Contact("Joe","Noggs",123,1,"1")) + List(Contact("Joe", "Bloggs", 123, 1, "1"), Contact("Joe", 
"Noggs", 123, 1, "1")) } "string datatypes" in { ctx.run(query[Contact].filterByKeys(Map("firstName" -> "Joe", "addressFk" -> "1"))) mustEqual - List(Contact("Joe","Bloggs",123,1,"1"), Contact("Joe","Noggs",123,1,"1")) + List(Contact("Joe", "Bloggs", 123, 1, "1"), Contact("Joe", "Noggs", 123, 1, "1")) } "date datatypes" in { - ctx.run(query[DateEncodingTestEntity].filterByKeys(Map("v1" -> makeEntity(1).v1, "v2" -> makeEntity(1).v2))) mustEqual List(makeEntity(1)) + ctx.run( + query[DateEncodingTestEntity].filterByKeys(Map("v1" -> makeEntity(1).v1, "v2" -> makeEntity(1).v2)) + ) mustEqual List(makeEntity(1)) } "date-string datatypes" in { - ctx.run(query[DateEncodingTestEntity].filterByKeys(Map("v1" -> makeEntity(1).v1.toString, "v2" -> makeEntity(1).v2.toString))) mustEqual List(makeEntity(1)) + ctx.run( + query[DateEncodingTestEntity].filterByKeys( + Map("v1" -> makeEntity(1).v1.toString, "v2" -> makeEntity(1).v2.toString) + ) + ) mustEqual List(makeEntity(1)) } } @@ -83,6 +89,6 @@ class FlicerMapTypesSpec extends Spec with Inside { val map = Map[String, Any]("extraInfo" -> Info("1")) ctx.run(querySchema[ContactComplex]("Contact").filterByKeys(map)) mustEqual - List(ContactComplex("Joe","Bloggs",123,1,Info("1")), ContactComplex("Joe","Noggs",123,1,Info("1"))) + List(ContactComplex("Joe", "Bloggs", 123, 1, Info("1")), ContactComplex("Joe", "Noggs", 123, 1, Info("1"))) } -} \ No newline at end of file +} diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/FlicerVariableColumnsSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/FlicerVariableColumnsSpec.scala index eb0c2e0b9..10e83d7c4 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/FlicerVariableColumnsSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/FlicerVariableColumnsSpec.scala @@ -15,30 +15,43 @@ class FlicerVariableColumnsSpec extends Spec with Inside { ctx.run(sql"TRUNCATE TABLE AddressT, PersonT RESTART IDENTITY".as[Delete[PersonT]]) // Using sequence generation in the DB to create a table with a large amount of content fast. Otherwise // the test has to wait for 1000000 individual inserts which is very slow. 
- ctx.run(sql"insert into PersonT (first, last, age) select i, i, i from generate_series(1, 100000) as t(i);".as[Insert[PersonT]]) - ctx.run(sql"insert into AddressT (ownerId, street) select i, i from generate_series(1, 100000) as t(i);".as[Insert[PersonT]]) + ctx.run( + sql"insert into PersonT (first, last, age) select i, i, i from generate_series(1, 100000) as t(i);" + .as[Insert[PersonT]] + ) + ctx.run( + sql"insert into AddressT (ownerId, street) select i, i from generate_series(1, 100000) as t(i);" + .as[Insert[PersonT]] + ) } - override def afterAll(): Unit = { + override def afterAll(): Unit = // Want to truncate instead of delete so that plan cost will be consistent ctx.run(sql"TRUNCATE TABLE AddressT, PersonT RESTART IDENTITY".as[Delete[PersonT]]) - } "Query Plan should adjust accordingly when" - { inline def q(inline columns: List[String]) = quote { - query[PersonT].leftJoin(query[AddressT]).on((p, a) => p.id == a.ownerId) + query[PersonT] + .leftJoin(query[AddressT]) + .on((p, a) => p.id == a.ownerId) .map((p, a) => Combo(p.first, a.map(_.street))) .filterColumns(columns) } inline def plan(inline columns: List[String]) = - quote { sql"EXPLAIN ${q(columns)}".pure.as[Query[String]] } + quote(sql"EXPLAIN ${q(columns)}".pure.as[Query[String]]) "one column from the joined table is selected" in { val columns = List[String]("street") // Also check that the actual query works ctx.run(q(columns).take(5)) mustEqual - List(Combo(null,Some("1")), Combo(null,Some("2")), Combo(null,Some("3")), Combo(null,Some("4")), Combo(null,Some("5"))) + List( + Combo(null, Some("1")), + Combo(null, Some("2")), + Combo(null, Some("3")), + Combo(null, Some("4")), + Combo(null, Some("5")) + ) } // I.e. if no columns from the joined table are selected and the joined table has a unique constraint on the joined @@ -52,20 +65,19 @@ class FlicerVariableColumnsSpec extends Spec with Inside { // Note that despite the fact that we are selecting from PersonT, the Combo class's column is `name` so that is what // we are get columns from. 
val columns = List[String]("name") - inside(ctx.run(plan(columns), OuterSelectWrap.Never)) { - case List(result) => - result must startWith("Seq Scan on persont p") + inside(ctx.run(plan(columns), OuterSelectWrap.Never)) { case List(result) => + result must startWith("Seq Scan on persont p") } // Also check that the actual query works ctx.run(q(columns).take(5)) mustEqual - List(Combo("1",None), Combo("2",None), Combo("3",None), Combo("4",None), Combo("5",None)) + List(Combo("1", None), Combo("2", None), Combo("3", None), Combo("4", None), Combo("5", None)) } "no columns from any table are selected" in { val columns = List[String]() - val result = ctx.run(plan(columns), OuterSelectWrap.Never).head + val result = ctx.run(plan(columns), OuterSelectWrap.Never).head result must startWith("Seq Scan on persont p") } } -} \ No newline at end of file +} diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/JdbcArrayOpsSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/JdbcArrayOpsSpec.scala index 3ae506451..3b4772f73 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/JdbcArrayOpsSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/JdbcArrayOpsSpec.scala @@ -8,7 +8,7 @@ import io.getquill.context.sql.SqlContext class JdbcArrayOpsSpec extends ArrayOpsSpec { // Need to cast the context to get PostgresDialect, Literal working otherwise it will try to summon 'Idiom' // maybe there should be a fallback for that actually - val ctx: testContext.type = testContext //: SqlContext[PostgresDialect, Literal] with ArrayEncoding + val ctx: testContext.type = testContext // : SqlContext[PostgresDialect, Literal] with ArrayEncoding import ctx._ "contains" in { diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/JdbcContextSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/JdbcContextSpec.scala index b0fb00319..09dc07b89 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/JdbcContextSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/JdbcContextSpec.scala @@ -92,7 +92,9 @@ class JdbcContextSpec extends Spec { "with multiple columns and operations" in { ctx.run(qr1.delete) val inserted = ctx.run { - qr1.insertValue(lift(TestEntity("foo", 1, 18L, Some(123), true))).returning(r => (r.i + 100, r.s, r.o.map(_ + 100))) + qr1 + .insertValue(lift(TestEntity("foo", 1, 18L, Some(123), true))) + .returning(r => (r.i + 100, r.s, r.o.map(_ + 100))) } (1 + 100, "foo", Some(123 + 100)) mustBe inserted } @@ -103,8 +105,9 @@ class JdbcContextSpec extends Spec { ctx.run(qr1.delete) val inserted = ctx.run { - qr1.insertValue(lift(TestEntity("two", 36, 18L, Some(123), true))).returning(r => - (r.i, r.s + "_s", qr2.filter(rr => rr.i == r.i).map(_.s).max)) + qr1 + .insertValue(lift(TestEntity("two", 36, 18L, Some(123), true))) + .returning(r => (r.i, r.s + "_s", qr2.filter(rr => rr.i == r.i).map(_.s).max)) } (36, "two_s", Some("foobar")) mustBe inserted } @@ -116,8 +119,9 @@ class JdbcContextSpec extends Spec { val value = "foobar" ctx.run(qr1.delete) val inserted = ctx.run { - qr1.insertValue(lift(TestEntity("two", 36, 18L, Some(123), true))).returning(r => - (r.i, r.s + "_s", qr2.filter(rr => rr.i == r.i && rr.s == lift(value)).map(_.s).max)) + qr1 + .insertValue(lift(TestEntity("two", 36, 18L, Some(123), true))) + .returning(r => (r.i, r.s + "_s", qr2.filter(rr => rr.i == r.i && rr.s == lift(value)).map(_.s).max)) } (36, "two_s", Some("foobar")) mustBe inserted 
} @@ -126,8 +130,9 @@ class JdbcContextSpec extends Spec { ctx.run(qr1.delete) ctx.run(qr1.insertValue(lift(TestEntity("one", 1, 18L, Some(1), true)))) val inserted = ctx.run { - qr1.insertValue(lift(TestEntity("two", 2, 18L, Some(123), true))).returning(r => - (r.i, r.s + "_s", qr1.filter(rr => rr.o.exists(_ == r.i - 1)).map(_.s).max)) + qr1 + .insertValue(lift(TestEntity("two", 2, 18L, Some(123), true))) + .returning(r => (r.i, r.s + "_s", qr1.filter(rr => rr.o.exists(_ == r.i - 1)).map(_.s).max)) } (2, "two_s", Some("one")) mustBe inserted } @@ -136,8 +141,7 @@ class JdbcContextSpec extends Spec { ctx.run(qr1Emb.delete) ctx.run(qr1Emb.insertValue(lift(TestEntityEmb(Emb("one", 1), 18L, Some(123))))) val inserted = ctx.run { - qr1Emb.insertValue(lift(TestEntityEmb(Emb("two", 2), 18L, Some(123)))).returning(r => - (r.emb.i, r.o)) + qr1Emb.insertValue(lift(TestEntityEmb(Emb("two", 2), 18L, Some(123)))).returning(r => (r.emb.i, r.o)) } (2, Some(123)) mustBe inserted } @@ -168,7 +172,9 @@ class JdbcContextSpec extends Spec { ctx.run(qr1.insertValue(lift(TestEntity("foo", 1, 18L, Some(123), true)))) val updated = ctx.run { - qr1.updateValue(lift(TestEntity("bar", 2, 42L, Some(321), true))).returning(r => (r.i + 100, r.s, r.o.map(_ + 100))) + qr1 + .updateValue(lift(TestEntity("bar", 2, 42L, Some(321), true))) + .returning(r => (r.i + 100, r.s, r.o.map(_ + 100))) } (2 + 100, "bar", Some(321 + 100)) mustBe updated } @@ -181,8 +187,9 @@ class JdbcContextSpec extends Spec { ctx.run(qr1.insertValue(lift(TestEntity("foo", 1, 18L, Some(123), true)))) val updated = ctx.run { - qr1.updateValue(lift(TestEntity("bar", 36, 42L, Some(321), true))).returning(r => - (r.i, r.s + "_s", qr2.filter(rr => rr.i == r.i).map(_.s).max)) + qr1 + .updateValue(lift(TestEntity("bar", 36, 42L, Some(321), true))) + .returning(r => (r.i, r.s + "_s", qr2.filter(rr => rr.i == r.i).map(_.s).max)) } (36, "bar_s", Some("foobar")) mustBe updated } @@ -196,8 +203,9 @@ class JdbcContextSpec extends Spec { ctx.run(qr1.insertValue(lift(TestEntity("foo", 1, 18L, Some(123), true)))) val updated = ctx.run { - qr1.updateValue(lift(TestEntity("bar", 36, 42L, Some(321), true))).returning(r => - (r.i, r.s + "_s", qr2.filter(rr => rr.i == r.i && rr.s == lift(value)).map(_.s).max)) + qr1 + .updateValue(lift(TestEntity("bar", 36, 42L, Some(321), true))) + .returning(r => (r.i, r.s + "_s", qr2.filter(rr => rr.i == r.i && rr.s == lift(value)).map(_.s).max)) } (36, "bar_s", Some("foobar")) mustBe updated } @@ -207,8 +215,9 @@ class JdbcContextSpec extends Spec { ctx.run(qr1.insertValue(lift(TestEntity("one", 1, 18L, Some(1), true)))) val updated = ctx.run { - qr1.updateValue(lift(TestEntity("two", 2, 18L, Some(123), true))).returning(r => - (r.i, r.s + "_s", qr1.filter(rr => rr.o.exists(_ == r.i - 1)).map(_.s).max)) + qr1 + .updateValue(lift(TestEntity("two", 2, 18L, Some(123), true))) + .returning(r => (r.i, r.s + "_s", qr1.filter(rr => rr.o.exists(_ == r.i - 1)).map(_.s).max)) } (2, "two_s", Some("one")) mustBe updated } @@ -264,8 +273,7 @@ class JdbcContextSpec extends Spec { ctx.run(qr1.insertValue(lift(TestEntity("foo", 1, 18L, Some(123), true)))) val deleted = ctx.run { - qr1.delete.returning(r => - (r.i, r.s + "_s", qr2.filter(rr => rr.i == r.i).map(_.s).max)) + qr1.delete.returning(r => (r.i, r.s + "_s", qr2.filter(rr => rr.i == r.i).map(_.s).max)) } (1, "foo_s", Some("foobar")) mustBe deleted } @@ -279,8 +287,7 @@ class JdbcContextSpec extends Spec { ctx.run(qr1.insertValue(lift(TestEntity("foo", 1, 18L, Some(123), true)))) val deleted = 
ctx.run { - qr1.delete.returning(r => - (r.i, r.s + "_s", qr2.filter(rr => rr.i == r.i && rr.s == lift(value)).map(_.s).max)) + qr1.delete.returning(r => (r.i, r.s + "_s", qr2.filter(rr => rr.i == r.i && rr.s == lift(value)).map(_.s).max)) } (1, "foo_s", Some("foobar")) mustBe deleted } @@ -290,8 +297,7 @@ class JdbcContextSpec extends Spec { ctx.run(qr1.insertValue(lift(TestEntity("one", 2, 18L, Some(1), true)))) val deleted = ctx.run { - qr1.delete.returning(r => - (r.i, r.s + "_s", qr1.filter(rr => rr.o.exists(_ == r.i - 1)).map(_.s).max)) + qr1.delete.returning(r => (r.i, r.s + "_s", qr1.filter(rr => rr.o.exists(_ == r.i - 1)).map(_.s).max)) } (2, "one_s", Some("one")) mustBe deleted } diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/JdbcEncodingSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/JdbcEncodingSpec.scala index 437375ac5..613ee3945 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/JdbcEncodingSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/JdbcEncodingSpec.scala @@ -22,15 +22,14 @@ class JdbcEncodingSpec extends EncodingSpec { "encodes sets" in { testContext.run(query[EncodingTestEntity].delete) testContext.run(liftQuery(insertValues).foreach(e => query[EncodingTestEntity].insertValue(e))) - val q = quote { - (set: Query[Int]) => - query[EncodingTestEntity].filter(t => set.contains(t.v6)) + val q = quote { (set: Query[Int]) => + query[EncodingTestEntity].filter(t => set.contains(t.v6)) } verify(testContext.run(q(liftQuery(insertValues.map(_.v6))))) } "returning custom type" in { - val uuid = testContext.run(insertBarCode(lift(barCodeEntry))).get + val uuid = testContext.run(insertBarCode(lift(barCodeEntry))).get val (barCode :: Nil) = testContext.run(findBarCodeByUuid(uuid)) verifyBarcode(barCode) @@ -40,8 +39,8 @@ class JdbcEncodingSpec extends EncodingSpec { "LocalDateTime" in { case class EncodingTestEntity(v11: Option[LocalDateTime]) val now = LocalDateTime.now().truncatedTo(ChronoUnit.MICROS) - val e1 = EncodingTestEntity(Some(now)) - val e2 = EncodingTestEntity(None) + val e1 = EncodingTestEntity(Some(now)) + val e2 = EncodingTestEntity(None) val res: (List[EncodingTestEntity], List[EncodingTestEntity]) = { val steps = { testContext.run(query[EncodingTestEntity].delete) @@ -60,7 +59,7 @@ class JdbcEncodingSpec extends EncodingSpec { "Encode/Decode Other Time Types" in { context.run(query[TimeEntity].delete) - val zid = ZoneId.systemDefault() + val zid = ZoneId.systemDefault() val timeEntity = TimeEntity.make(zid) context.run(query[TimeEntity].insertValue(lift(timeEntity))) val actual = context.run(query[TimeEntity]).head diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/OnConflictJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/OnConflictJdbcSpec.scala index 387b478d5..4b5c5a954 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/OnConflictJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/OnConflictJdbcSpec.scala @@ -44,9 +44,7 @@ class OnConflictJdbcSpec extends OnConflictSpec { _.i2 -> "i", _.l2 -> "l", _.o2 -> "o" - ).insertValue(lift(e)).onConflictUpdate(_.i2)( - (t, _) => t.l2 -> (t.l2 + 1) - ) + ).insertValue(lift(e)).onConflictUpdate(_.i2)((t, _) => t.l2 -> (t.l2 + 1)) } val e1Rename = TestEntityRename("r1", 4, 0, None) diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/OptionJdbcSpec.scala 
b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/OptionJdbcSpec.scala index 93ee9e9bc..6c080cf0e 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/OptionJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/OptionJdbcSpec.scala @@ -28,7 +28,9 @@ class OptionJdbcSpec extends OptionQuerySpec { } "Example 1.2 - Simple Map with Condition and GetOrElse" in { - testContext.run(`Simple Map with Condition and GetOrElse`) should contain theSameElementsAs `Simple Map with Condition and GetOrElse Result` + testContext.run( + `Simple Map with Condition and GetOrElse` + ) should contain theSameElementsAs `Simple Map with Condition and GetOrElse Result` } "Example 2 - Simple GetOrElse" in { diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/ProductJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/ProductJdbcSpec.scala index e03061cc3..cbf5ffdd9 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/ProductJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/ProductJdbcSpec.scala @@ -16,14 +16,14 @@ class ProductJdbcSpec extends ProductSpec { "Product" - { "Insert multiple products" in { val inserted = testContext.run(liftQuery(productEntries).foreach(e => productInsert(e))) - val product = testContext.run(productById(lift(inserted(2)))).head + val product = testContext.run(productById(lift(inserted(2)))).head product.description mustEqual productEntries(2).description product.id mustEqual inserted(2) } "Single insert product" in { val inserted = testContext.run(productSingleInsert) - val product = testContext.run(productById(lift(inserted))).head + val product = testContext.run(productById(lift(inserted))).head product.description mustEqual "Window" product.id mustEqual inserted } @@ -46,7 +46,7 @@ class ProductJdbcSpec extends ProductSpec { val q1 = quote { product.insert(_.sku -> lift(prd.sku), _.description -> lift(prd.description)).returning(_.id) } - val inserted = testContext.run(q1) + val inserted = testContext.run(q1) val returnedProduct = testContext.run(productById(lift(inserted))).head returnedProduct.description mustEqual "test2" returnedProduct.sku mustEqual 2L @@ -54,8 +54,8 @@ class ProductJdbcSpec extends ProductSpec { } "Single product insert with a method quotation" in { - val prd = Product(0L, "test3", 3L) - val inserted = testContext.run(productInsert(lift(prd))) + val prd = Product(0L, "test3", 3L) + val inserted = testContext.run(productInsert(lift(prd))) val returnedProduct = testContext.run(productById(lift(inserted))).head returnedProduct.description mustEqual "test3" returnedProduct.sku mustEqual 3L diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/package.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/package.scala index 6bd486898..e812f260b 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/package.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/postgres/package.scala @@ -1,10 +1,14 @@ package io.getquill.context.jdbc import io.getquill._ -import io.getquill.context.sql.{ TestDecoders, TestEncoders } +import io.getquill.context.sql.{TestDecoders, TestEncoders} package object postgres { - object testContext extends PostgresJdbcContext[Literal](Literal, "testPostgresDB") with TestEntities with TestEncoders with TestDecoders + object testContext + extends PostgresJdbcContext[Literal](Literal, "testPostgresDB") + with TestEntities + with 
TestEncoders + with TestDecoders } diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/BatchValuesJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/BatchValuesJdbcSpec.scala index 4219b5142..376130ac1 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/BatchValuesJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/BatchValuesJdbcSpec.scala @@ -10,8 +10,8 @@ class BatchValuesJdbcSpec extends BatchValuesSpec { override def beforeEach(): Unit = { testContext.run(query[Product].delete) - //For the Ex 2 test to actually work, the ids of the inserted entities need to start - //testContext.run(sql"DELETE FROM sqlite_sequence WHERE name='Product';".as[Delete[Product]]) + // For the Ex 2 test to actually work, the ids of the inserted entities need to start + // testContext.run(sql"DELETE FROM sqlite_sequence WHERE name='Product';".as[Delete[Product]]) super.beforeEach() } @@ -40,4 +40,4 @@ class BatchValuesJdbcSpec extends BatchValuesSpec { testContext.run(op, batchSize) testContext.run(get).toSet mustEqual result.toSet } -} \ No newline at end of file +} diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/CaseClassQueryJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/CaseClassQueryJdbcSpec.scala index 66a9088df..33ff9c698 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/CaseClassQueryJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/CaseClassQueryJdbcSpec.scala @@ -20,20 +20,30 @@ class CaseClassQueryJdbcSpec extends CaseClassQuerySpec { } "Example 1 - Single Case Class Mapping" in { - testContext.run(`Ex 1 CaseClass Record Output`) should contain theSameElementsAs `Ex 1 CaseClass Record Output expected result` + testContext.run( + `Ex 1 CaseClass Record Output` + ) should contain theSameElementsAs `Ex 1 CaseClass Record Output expected result` } "Example 1A - Single Case Class Mapping" in { - testContext.run(`Ex 1A CaseClass Record Output`) should contain theSameElementsAs `Ex 1 CaseClass Record Output expected result` + testContext.run( + `Ex 1A CaseClass Record Output` + ) should contain theSameElementsAs `Ex 1 CaseClass Record Output expected result` } "Example 1B - Single Case Class Mapping" in { - testContext.run(`Ex 1B CaseClass Record Output`) should contain theSameElementsAs `Ex 1 CaseClass Record Output expected result` + testContext.run( + `Ex 1B CaseClass Record Output` + ) should contain theSameElementsAs `Ex 1 CaseClass Record Output expected result` } "Example 2 - Single Record Mapped Join" in { - testContext.run(`Ex 2 Single-Record Join`) should contain theSameElementsAs `Ex 2 Single-Record Join expected result` + testContext.run( + `Ex 2 Single-Record Join` + ) should contain theSameElementsAs `Ex 2 Single-Record Join expected result` } "Example 3 - Inline Record as Filter" in { - testContext.run(`Ex 3 Inline Record Usage`) should contain theSameElementsAs `Ex 3 Inline Record Usage exepected result` + testContext.run( + `Ex 3 Inline Record Usage` + ) should contain theSameElementsAs `Ex 3 Inline Record Usage exepected result` } } diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/DistinctJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/DistinctJdbcSpec.scala index 69e31cc38..b522fddfc 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/DistinctJdbcSpec.scala +++ 
b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/DistinctJdbcSpec.scala @@ -23,28 +23,42 @@ class DistinctJdbcSpec extends DistinctSpec { testContext.run(`Ex 1 Distinct One Field`) should contain theSameElementsAs `Ex 1 Distinct One Field Result` } "Ex 2 Distinct Two Field Tuple`" in { - testContext.run(`Ex 2 Distinct Two Field Tuple`) should contain theSameElementsAs `Ex 2 Distinct Two Field Tuple Result` + testContext.run( + `Ex 2 Distinct Two Field Tuple` + ) should contain theSameElementsAs `Ex 2 Distinct Two Field Tuple Result` } "Ex 2a Distinct Two Field Tuple Same Element`" in { - testContext.run(`Ex 2a Distinct Two Field Tuple Same Element`) should contain theSameElementsAs `Ex 2a Distinct Two Field Tuple Same Element Result` + testContext.run( + `Ex 2a Distinct Two Field Tuple Same Element` + ) should contain theSameElementsAs `Ex 2a Distinct Two Field Tuple Same Element Result` } "Ex 3 Distinct Two Field Case Class`" in { - testContext.run(`Ex 3 Distinct Two Field Case Class`) should contain theSameElementsAs `Ex 3 Distinct Two Field Case Class Result` + testContext.run( + `Ex 3 Distinct Two Field Case Class` + ) should contain theSameElementsAs `Ex 3 Distinct Two Field Case Class Result` } "Ex 4-base non-Distinct Subquery`" in { - testContext.run(`Ex 4-base non-Distinct Subquery`) should contain theSameElementsAs `Ex 4-base non-Distinct Subquery Result` + testContext.run( + `Ex 4-base non-Distinct Subquery` + ) should contain theSameElementsAs `Ex 4-base non-Distinct Subquery Result` } "Ex 4 Distinct Subquery`" in { testContext.run(`Ex 4 Distinct Subquery`) should contain theSameElementsAs `Ex 4 Distinct Subquery Result` } "Ex 5 Distinct Subquery with Map Single Field" in { - testContext.run(`Ex 5 Distinct Subquery with Map Single Field`) should contain theSameElementsAs `Ex 5 Distinct Subquery with Map Single Field Result` + testContext.run( + `Ex 5 Distinct Subquery with Map Single Field` + ) should contain theSameElementsAs `Ex 5 Distinct Subquery with Map Single Field Result` } "Ex 6 Distinct Subquery with Map Multi Field" in { - testContext.run(`Ex 6 Distinct Subquery with Map Multi Field`) should contain theSameElementsAs `Ex 6 Distinct Subquery with Map Multi Field Result` + testContext.run( + `Ex 6 Distinct Subquery with Map Multi Field` + ) should contain theSameElementsAs `Ex 6 Distinct Subquery with Map Multi Field Result` } "Ex 7 Distinct Subquery with Map Multi Field Tuple" in { - testContext.run(`Ex 7 Distinct Subquery with Map Multi Field Tuple`) should contain theSameElementsAs `Ex 7 Distinct Subquery with Map Multi Field Tuple Result` + testContext.run( + `Ex 7 Distinct Subquery with Map Multi Field Tuple` + ) should contain theSameElementsAs `Ex 7 Distinct Subquery with Map Multi Field Tuple Result` } "Ex 8 Distinct With Sort" in { testContext.run(`Ex 8 Distinct With Sort`) mustEqual `Ex 8 Distinct With Sort Result` diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/JdbcEncodingSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/JdbcEncodingSpec.scala index 68b5cac2b..16c511dc1 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/JdbcEncodingSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/JdbcEncodingSpec.scala @@ -18,7 +18,7 @@ class JdbcEncodingSpec extends EncodingSpec { "Encode/Decode Other Time Types" in { context.run(query[TimeEntity].delete) - val zid = ZoneId.systemDefault() + val zid = ZoneId.systemDefault() val timeEntity = TimeEntity.make(zid) 
context.run(query[TimeEntity].insertValue(lift(timeEntity))) val actual = context.run(query[TimeEntity]).head diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/OptionJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/OptionJdbcSpec.scala index 0e307a276..8642b1f66 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/OptionJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/OptionJdbcSpec.scala @@ -28,7 +28,9 @@ class OptionJdbcSpec extends OptionQuerySpec { } "Example 1.2 - Simple Map with Condition and GetOrElse" in { - testContext.run(`Simple Map with Condition and GetOrElse`) should contain theSameElementsAs `Simple Map with Condition and GetOrElse Result` + testContext.run( + `Simple Map with Condition and GetOrElse` + ) should contain theSameElementsAs `Simple Map with Condition and GetOrElse Result` } "Example 2 - Simple GetOrElse" in { diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/ProductJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/ProductJdbcSpec.scala index d5623e92a..72be618c1 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/ProductJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/ProductJdbcSpec.scala @@ -17,13 +17,13 @@ class ProductJdbcSpec extends ProductSpec { "Insert multiple products" in { val inserted = productEntries.map(product => testContext.run(productInsert(lift(product)))) val id: Long = inserted(2) - val product = testContext.run(productById(lift(id))).head + val product = testContext.run(productById(lift(id))).head product.description mustEqual productEntries(2).description product.id mustEqual inserted(2) } "Single insert product" in { val inserted = testContext.run(productSingleInsert) - val product = testContext.run(productById(lift(inserted))).head + val product = testContext.run(productById(lift(inserted))).head product.description mustEqual "Window" product.id mustEqual inserted } @@ -55,7 +55,7 @@ class ProductJdbcSpec extends ProductSpec { val q1 = quote { product.insert(_.sku -> lift(prd.sku), _.description -> lift(prd.description)).returningGenerated(_.id) } - val inserted = testContext.run(q1) + val inserted = testContext.run(q1) val returnedProduct = testContext.run(productById(lift(inserted))).head returnedProduct.description mustEqual "test2" returnedProduct.sku mustEqual 2L @@ -63,8 +63,8 @@ class ProductJdbcSpec extends ProductSpec { } "Single product insert with a method quotation" in { - val prd = Product(0L, "test3", 3L) - val inserted = testContext.run(productInsert(lift(prd))) + val prd = Product(0L, "test3", 3L) + val inserted = testContext.run(productInsert(lift(prd))) val returnedProduct = testContext.run(productById(lift(inserted))).head returnedProduct.description mustEqual "test3" returnedProduct.sku mustEqual 3L diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/package.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/package.scala index 61304a2bc..9a9bf1185 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/package.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlite/package.scala @@ -1,10 +1,14 @@ package io.getquill.context.jdbc -import io.getquill.context.sql.{ TestDecoders, TestEncoders } -import io.getquill.{ Literal, SqliteJdbcContext, TestEntities } +import io.getquill.context.sql.{TestDecoders, TestEncoders} +import io.getquill.{Literal, SqliteJdbcContext, 
TestEntities} package object sqlite { - object testContext extends SqliteJdbcContext(Literal, "testSqliteDB") with TestEntities with TestEncoders with TestDecoders + object testContext + extends SqliteJdbcContext(Literal, "testSqliteDB") + with TestEntities + with TestEncoders + with TestDecoders } diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/BatchValuesJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/BatchValuesJdbcSpec.scala index e98bf8be8..e2d9a64d8 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/BatchValuesJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/BatchValuesJdbcSpec.scala @@ -3,7 +3,6 @@ package io.getquill.context.jdbc.sqlserver import io.getquill.context.sql.BatchValuesSpec import io.getquill._ - class BatchValuesJdbcSpec extends BatchValuesSpec { // val context = testContext @@ -45,4 +44,4 @@ class BatchValuesJdbcSpec extends BatchValuesSpec { // testContext.run(splicedOp, batchSize) testContext.run(get) mustEqual result } -} \ No newline at end of file +} diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/DepartmentsJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/DepartmentsJdbcSpec.scala index 5329da934..911571239 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/DepartmentsJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/DepartmentsJdbcSpec.scala @@ -28,4 +28,4 @@ class DepartmentsJdbcSpec extends DepartmentsSpec { "Example 9 - nested db" in { testContext.run(`Example 9 expertise`(lift(`Example 9 param`))) mustEqual `Example 9 expected result` } -} \ No newline at end of file +} diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/DistinctJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/DistinctJdbcSpec.scala index a7744bec3..3fe3d5f90 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/DistinctJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/DistinctJdbcSpec.scala @@ -23,28 +23,42 @@ class DistinctJdbcSpec extends DistinctSpec { testContext.run(`Ex 1 Distinct One Field`) should contain theSameElementsAs `Ex 1 Distinct One Field Result` } "Ex 2 Distinct Two Field Tuple`" in { - testContext.run(`Ex 2 Distinct Two Field Tuple`) should contain theSameElementsAs `Ex 2 Distinct Two Field Tuple Result` + testContext.run( + `Ex 2 Distinct Two Field Tuple` + ) should contain theSameElementsAs `Ex 2 Distinct Two Field Tuple Result` } "Ex 2a Distinct Two Field Tuple Same Element`" in { - testContext.run(`Ex 2a Distinct Two Field Tuple Same Element`) should contain theSameElementsAs `Ex 2a Distinct Two Field Tuple Same Element Result` + testContext.run( + `Ex 2a Distinct Two Field Tuple Same Element` + ) should contain theSameElementsAs `Ex 2a Distinct Two Field Tuple Same Element Result` } "Ex 3 Distinct Two Field Case Class`" in { - testContext.run(`Ex 3 Distinct Two Field Case Class`) should contain theSameElementsAs `Ex 3 Distinct Two Field Case Class Result` + testContext.run( + `Ex 3 Distinct Two Field Case Class` + ) should contain theSameElementsAs `Ex 3 Distinct Two Field Case Class Result` } "Ex 4-base non-Distinct Subquery`" in { - testContext.run(`Ex 4-base non-Distinct Subquery`) should contain theSameElementsAs `Ex 4-base non-Distinct Subquery Result` + testContext.run( + `Ex 4-base non-Distinct Subquery` + ) should contain theSameElementsAs 
`Ex 4-base non-Distinct Subquery Result` } "Ex 4 Distinct Subquery`" in { testContext.run(`Ex 4 Distinct Subquery`) should contain theSameElementsAs `Ex 4 Distinct Subquery Result` } "Ex 5 Distinct Subquery with Map Single Field" in { - testContext.run(`Ex 5 Distinct Subquery with Map Single Field`) should contain theSameElementsAs `Ex 5 Distinct Subquery with Map Single Field Result` + testContext.run( + `Ex 5 Distinct Subquery with Map Single Field` + ) should contain theSameElementsAs `Ex 5 Distinct Subquery with Map Single Field Result` } "Ex 6 Distinct Subquery with Map Multi Field" in { - testContext.run(`Ex 6 Distinct Subquery with Map Multi Field`) should contain theSameElementsAs `Ex 6 Distinct Subquery with Map Multi Field Result` + testContext.run( + `Ex 6 Distinct Subquery with Map Multi Field` + ) should contain theSameElementsAs `Ex 6 Distinct Subquery with Map Multi Field Result` } "Ex 7 Distinct Subquery with Map Multi Field Tuple" in { - testContext.run(`Ex 7 Distinct Subquery with Map Multi Field Tuple`) should contain theSameElementsAs `Ex 7 Distinct Subquery with Map Multi Field Tuple Result` + testContext.run( + `Ex 7 Distinct Subquery with Map Multi Field Tuple` + ) should contain theSameElementsAs `Ex 7 Distinct Subquery with Map Multi Field Tuple Result` } "Ex 8 Distinct With Sort" in { testContext.run(`Ex 8 Distinct With Sort`) mustEqual `Ex 8 Distinct With Sort Result` diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/JdbcContextSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/JdbcContextSpec.scala index 2ce5ef15e..22e217b6e 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/JdbcContextSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/JdbcContextSpec.scala @@ -1,6 +1,6 @@ package io.getquill.context.jdbc.sqlserver -import io.getquill.{ Literal, Spec, SqlServerJdbcContext } +import io.getquill.{Literal, Spec, SqlServerJdbcContext} import io.getquill._ class JdbcContextSpec extends Spec { @@ -81,7 +81,9 @@ class JdbcContextSpec extends Spec { "with multiple columns and operations" in { ctx.run(qr1.delete) val inserted = ctx.run { - qr1.insertValue(lift(TestEntity("foo", 1, 18L, Some(123), true))).returning(r => (r.i + 100, r.s, r.o.map(_ + 100))) + qr1 + .insertValue(lift(TestEntity("foo", 1, 18L, Some(123), true))) + .returning(r => (r.i + 100, r.s, r.o.map(_ + 100))) } (1 + 100, "foo", Some(123 + 100)) mustBe inserted } @@ -90,8 +92,7 @@ class JdbcContextSpec extends Spec { ctx.run(qr1Emb.delete) ctx.run(qr1Emb.insertValue(lift(TestEntityEmb(Emb("one", 1), 18L, Some(123))))) val inserted = ctx.run { - qr1Emb.insertValue(lift(TestEntityEmb(Emb("two", 2), 18L, Some(123)))).returning(r => - (r.emb.i, r.o)) + qr1Emb.insertValue(lift(TestEntityEmb(Emb("two", 2), 18L, Some(123)))).returning(r => (r.emb.i, r.o)) } (2, Some(123)) mustBe inserted } @@ -122,7 +123,9 @@ class JdbcContextSpec extends Spec { ctx.run(qr1.insertValue(lift(TestEntity("baz", 6, 42L, Some(456), true)))) val updated = ctx.run { - qr1.updateValue(lift(TestEntity("foo", 1, 18L, Some(123), true))).returning(r => (r.i + 100, r.s, r.o.map(_ + 100))) + qr1 + .updateValue(lift(TestEntity("foo", 1, 18L, Some(123), true))) + .returning(r => (r.i + 100, r.s, r.o.map(_ + 100))) } (1 + 100, "foo", Some(123 + 100)) mustBe updated } @@ -197,4 +200,4 @@ class PendingUntilFixed extends Spec { "remove this once sqlserver build established" in { new SqlServerJdbcContext(Literal, "testPostgresDB").close() } -} \ No 
newline at end of file +} diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/JdbcEncodingSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/JdbcEncodingSpec.scala index 0cdc9a379..97f931fc3 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/JdbcEncodingSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/JdbcEncodingSpec.scala @@ -19,19 +19,18 @@ class JdbcEncodingSpec extends EncodingSpec { "encodes sets" in { testContext.run(query[EncodingTestEntity].delete) testContext.run(liftQuery(insertValues).foreach(p => query[EncodingTestEntity].insertValue(p))) - val q = quote { - (set: Query[Int]) => - query[EncodingTestEntity].filter(t => set.contains(t.v6)) + val q = quote { (set: Query[Int]) => + query[EncodingTestEntity].filter(t => set.contains(t.v6)) } verify(testContext.run(q(liftQuery(insertValues.map(_.v6).toSet)))) } "Encode/Decode Other Time Types" in { context.run(query[TimeEntity].delete) - val zid = ZoneId.systemDefault() + val zid = ZoneId.systemDefault() val timeEntity = TimeEntity.make(zid) context.run(query[TimeEntity].insertValue(lift(timeEntity))) val actual = context.run(query[TimeEntity]).head timeEntity mustEqual actual } -} \ No newline at end of file +} diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/OptionJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/OptionJdbcSpec.scala index a06e6354c..94dfeb6a2 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/OptionJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/OptionJdbcSpec.scala @@ -21,8 +21,8 @@ class OptionJdbcSpec extends OptionQuerySpec { // Hack because Quill does not have correct SQL Server infix concatenation. See issue #1054 for more info. 
val `Simple Map with GetOrElse Infix` = quote { - query[Address].map( - a => (a.street, a.otherExtraInfo.map(info => sql"${info} + ' suffix'".as[String]).getOrElse("baz")) + query[Address].map(a => + (a.street, a.otherExtraInfo.map(info => sql"${info} + ' suffix'".as[String]).getOrElse("baz")) ) } @@ -31,7 +31,9 @@ class OptionJdbcSpec extends OptionQuerySpec { } "Example 1.0.1 - Simple Map with GetOrElse Infix" in { - testContext.run(`Simple Map with GetOrElse Infix`) should contain theSameElementsAs `Simple Map with GetOrElse Result` + testContext.run( + `Simple Map with GetOrElse Infix` + ) should contain theSameElementsAs `Simple Map with GetOrElse Result` } "Example 1.1 - Simple Map with GetOrElse" in { @@ -39,7 +41,9 @@ class OptionJdbcSpec extends OptionQuerySpec { } "Example 1.2 - Simple Map with Condition and GetOrElse" in { - testContext.run(`Simple Map with Condition and GetOrElse`) should contain theSameElementsAs `Simple Map with Condition and GetOrElse Result` + testContext.run( + `Simple Map with Condition and GetOrElse` + ) should contain theSameElementsAs `Simple Map with Condition and GetOrElse Result` } "Example 2 - Simple GetOrElse" in { diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/ProductJdbcSpec.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/ProductJdbcSpec.scala index 104808e60..33b78a518 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/ProductJdbcSpec.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/ProductJdbcSpec.scala @@ -17,14 +17,14 @@ class ProductJdbcSpec extends ProductSpec { "Insert multiple products" in { val inserted = productEntries.map(product => testContext.run(productInsert(lift(product)))) val id: Long = inserted(2) - val product = testContext.run(productById(lift(id))).head + val product = testContext.run(productById(lift(id))).head product.description mustEqual productEntries(2).description product.id mustEqual inserted(2) } "Single insert product" in { val inserted = testContext.run(productSingleInsert) - val product = testContext.run(productById(lift(inserted))).head + val product = testContext.run(productById(lift(inserted))).head product.description mustEqual "Window" product.id mustEqual inserted } @@ -45,7 +45,7 @@ class ProductJdbcSpec extends ProductSpec { val q1 = quote { product.insert(_.sku -> lift(prd.sku), _.description -> lift(prd.description)).returningGenerated(_.id) } - val inserted = testContext.run(q1) + val inserted = testContext.run(q1) val returnedProduct = testContext.run(productById(lift(inserted))).head returnedProduct.description mustEqual "test2" returnedProduct.sku mustEqual 2L @@ -53,8 +53,8 @@ class ProductJdbcSpec extends ProductSpec { } "Single product insert with a method quotation" in { - val prd = Product(0L, "test3", 3L) - val inserted = testContext.run(productInsert(lift(prd))) + val prd = Product(0L, "test3", 3L) + val inserted = testContext.run(productInsert(lift(prd))) val returnedProduct = testContext.run(productById(lift(inserted))).head returnedProduct.description mustEqual "test3" returnedProduct.sku mustEqual 3L diff --git a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/package.scala b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/package.scala index 69fe2567c..68729ba95 100644 --- a/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/package.scala +++ b/quill-jdbc/src/test/scala/io/getquill/context/jdbc/sqlserver/package.scala @@ -1,9 +1,13 @@ package 
io.getquill.context.jdbc import io.getquill._ -import io.getquill.context.sql.{ TestDecoders, TestEncoders } +import io.getquill.context.sql.{TestDecoders, TestEncoders} package object sqlserver { - object testContext extends SqlServerJdbcContext(Literal, "testSqlServerDB") with TestEntities with TestEncoders with TestDecoders -} \ No newline at end of file + object testContext + extends SqlServerJdbcContext(Literal, "testSqlServerDB") + with TestEntities + with TestEncoders + with TestDecoders +} diff --git a/quill-sql-tests/src/test/scala/io/getquill/DynamicQuerySpec.scala b/quill-sql-tests/src/test/scala/io/getquill/DynamicQuerySpec.scala index e9de3175c..c1f6de145 100644 --- a/quill-sql-tests/src/test/scala/io/getquill/DynamicQuerySpec.scala +++ b/quill-sql-tests/src/test/scala/io/getquill/DynamicQuerySpec.scala @@ -644,7 +644,7 @@ class DynamicQuerySpec extends Spec { // // } "filterOpt" in { val o = Some(1) - val q = dynamicQuery[TestEntity].filterOpt(o) { (t, i) => quote(t.i === unquote(i)) } + val q = dynamicQuery[TestEntity].filterOpt(o)((t, i) => quote(t.i === unquote(i))) testContext.run(q).string mustEqual ("""querySchema("TestEntity").filter(v0 => v0.i == ?)""") } "update" in { diff --git a/quill-sql-tests/src/test/scala/io/getquill/InsertAdvancedSpec.scala b/quill-sql-tests/src/test/scala/io/getquill/InsertAdvancedSpec.scala index ecf755343..c03e4ff76 100644 --- a/quill-sql-tests/src/test/scala/io/getquill/InsertAdvancedSpec.scala +++ b/quill-sql-tests/src/test/scala/io/getquill/InsertAdvancedSpec.scala @@ -20,27 +20,27 @@ class InsertAdvancedSpec extends Spec with Inside { "updateValue with various dynamic structures" - { val joe = Person("Joe", 123) - val v = quote { query[Person] } + val v = quote(query[Person]) "dynamic EntityQuery" in { ctx.run(v.filter(u => u.age == 55).updateValue(lift(joe))).triple mustEqual - ("UPDATE Person AS u SET name = ?, age = ? WHERE u.age = 55",List("Joe", 123),Dynamic) + ("UPDATE Person AS u SET name = ?, age = ? WHERE u.age = 55", List("Joe", 123), Dynamic) } "dynamic EntityQuery with lift" in { ctx.run(v.filter(u => u.age == lift(55)).updateValue(lift(joe))).triple mustEqual - ("UPDATE Person AS u SET name = ?, age = ? WHERE u.age = ?",List("Joe", 123, 55),Dynamic) + ("UPDATE Person AS u SET name = ?, age = ? WHERE u.age = ?", List("Joe", 123, 55), Dynamic) } "dynamic EntityQuery multiple indirection" in { - val v1 = quote { v.filter(u => u.age == 55) } + val v1 = quote(v.filter(u => u.age == 55)) ctx.run(v1.updateValue(lift(joe))).triple mustEqual - ("UPDATE Person AS u SET name = ?, age = ? WHERE u.age = 55",List("Joe", 123),Dynamic) + ("UPDATE Person AS u SET name = ?, age = ? 
WHERE u.age = 55", List("Joe", 123), Dynamic) } } "insert for simple entity should work for" - { // Insert(Entity("Person", List()), List(Assignment(Id("x1"), Property(Id("x1"), "name"), "Joe"), Assignment(Id("x2"), Property(Id("x2"), "age"), 123))) "simple, inline query" - { - inline def a = quote { query[Person].insert(_.name -> "Joe", _.age -> 123) } // Insert "assignment form" - inline def q = quote { query[Person].insertValue(Person("Joe", 123)) } // Insert entity form + inline def a = quote(query[Person].insert(_.name -> "Joe", _.age -> 123)) // Insert "assignment form" + inline def q = quote(query[Person].insertValue(Person("Joe", 123))) // Insert entity form "regular" in { ctx.run(q).triple mustEqual ("INSERT INTO Person (name,age) VALUES ('Joe', 123)", List(), Static) ctx.run(a).triple mustEqual ("INSERT INTO Person (name,age) VALUES ('Joe', 123)", List(), Static) @@ -68,22 +68,22 @@ class InsertAdvancedSpec extends Spec with Inside { // TODO Doing this with a runtime query should throw an exception (for now) "simple with schemaMeta and insert meta" in { inline given personMeta: InsertMeta[Person] = insertMeta[Person](_.age) - inline given sm: SchemaMeta[Person] = schemaMeta("tblPerson", _.name -> "colName") + inline given sm: SchemaMeta[Person] = schemaMeta("tblPerson", _.name -> "colName") ctx.run(q).triple mustEqual ("INSERT INTO tblPerson (colName) VALUES ('Joe')", List(), Static) ctx.run(a).triple mustEqual ("INSERT INTO tblPerson (colName,age) VALUES ('Joe', 123)", List(), Static) } // TODO Doing this with a runtime query should throw an exception (for now) "simple with schemaMeta with extra columns and insert meta" in { inline given personSchema: InsertMeta[Person] = insertMeta[Person](_.age) - inline given sm: SchemaMeta[Person] = schemaMeta("tblPerson", _.name -> "colName", _.age -> "colAge") + inline given sm: SchemaMeta[Person] = schemaMeta("tblPerson", _.name -> "colName", _.age -> "colAge") ctx.run(q).triple mustEqual ("INSERT INTO tblPerson (colName) VALUES ('Joe')", List(), Static) ctx.run(a).triple mustEqual ("INSERT INTO tblPerson (colName,colAge) VALUES ('Joe', 123)", List(), Static) } } "simple - runtime" in { - val a = quote { query[Person].insert(_.name -> "Joe", _.age -> 123) } - val q = quote { query[Person].insertValue(Person("Joe", 123)) } + val a = quote(query[Person].insert(_.name -> "Joe", _.age -> 123)) + val q = quote(query[Person].insertValue(Person("Joe", 123))) ctx.run(a).triple mustEqual ("INSERT INTO Person (name,age) VALUES ('Joe', 123)", List(), Dynamic) ctx.run(q).triple mustEqual ("INSERT INTO Person (name,age) VALUES ('Joe', 123)", List(), Dynamic) } @@ -113,8 +113,8 @@ class InsertAdvancedSpec extends Spec with Inside { "update for simple entity should work for" - { // Insert(Entity("Person", List()), List(Assignment(Id("x1"), Property(Id("x1"), "name"), "Joe"), Assignment(Id("x2"), Property(Id("x2"), "age"), 123))) "simple, inline query" - { - inline def a = quote { query[Person].update(_.name -> "Joe", _.age -> 123) } // Insert "assignment form" - inline def q = quote { query[Person].updateValue(Person("Joe", 123)) } // Insert entity form + inline def a = quote(query[Person].update(_.name -> "Joe", _.age -> 123)) // Insert "assignment form" + inline def q = quote(query[Person].updateValue(Person("Joe", 123))) // Insert entity form "regular" in { ctx.run(q).triple mustEqual ("UPDATE Person SET name = 'Joe', age = 123", List(), Static) ctx.run(a).triple mustEqual ("UPDATE Person SET name = 'Joe', age = 123", List(), Static) @@ -133,67 
+133,106 @@ class InsertAdvancedSpec extends Spec with Inside { // TODO Doing this with a runtime query should throw an exception (for now) "simple with schemaMeta and update meta" in { inline given personMeta: UpdateMeta[Person] = updateMeta[Person](_.age) - inline given sm: SchemaMeta[Person] = schemaMeta("tblPerson", _.name -> "colName") + inline given sm: SchemaMeta[Person] = schemaMeta("tblPerson", _.name -> "colName") ctx.run(q).triple mustEqual ("UPDATE tblPerson SET colName = 'Joe'", List(), Static) ctx.run(a).triple mustEqual ("UPDATE tblPerson SET colName = 'Joe', age = 123", List(), Static) } // TODO Doing this with a runtime query should throw an exception (for now) "simple with schemaMeta with extra columns and update meta" in { inline given personSchema: UpdateMeta[Person] = updateMeta[Person](_.age) - inline given sm: SchemaMeta[Person] = schemaMeta("tblPerson", _.name -> "colName", _.age -> "colAge") + inline given sm: SchemaMeta[Person] = schemaMeta("tblPerson", _.name -> "colName", _.age -> "colAge") ctx.run(q).triple mustEqual ("UPDATE tblPerson SET colName = 'Joe'", List(), Static) ctx.run(a).triple mustEqual ("UPDATE tblPerson SET colName = 'Joe', colAge = 123", List(), Static) } "simple with schemaMeta with extra columns and update meta fully lifted" in { - inline def q = quote { query[Person].filter(e => e.name == "JoeJoe").updateValue(lift(Person("Joe", 123))) } - inline def a = quote { query[Person].filter(e => e.name == "JoeJoe").update(_.name -> "Joe", _.age -> 123) } + inline def q = quote(query[Person].filter(e => e.name == "JoeJoe").updateValue(lift(Person("Joe", 123)))) + inline def a = quote(query[Person].filter(e => e.name == "JoeJoe").update(_.name -> "Joe", _.age -> 123)) inline given personSchema: UpdateMeta[Person] = updateMeta[Person](_.age) - inline given sm: SchemaMeta[Person] = schemaMeta("tblPerson", _.name -> "colName", _.age -> "colAge") - ctx.run(q).triple mustEqual ("UPDATE tblPerson AS e SET colName = ? WHERE e.colName = 'JoeJoe'", List("Joe"), Static) - ctx.run(a).triple mustEqual ("UPDATE tblPerson AS e SET colName = 'Joe', colAge = 123 WHERE e.colName = 'JoeJoe'", List(), Static) + inline given sm: SchemaMeta[Person] = schemaMeta("tblPerson", _.name -> "colName", _.age -> "colAge") + ctx.run(q).triple mustEqual ("UPDATE tblPerson AS e SET colName = ? WHERE e.colName = 'JoeJoe'", List( + "Joe" + ), Static) + ctx + .run(a) + .triple mustEqual ("UPDATE tblPerson AS e SET colName = 'Joe', colAge = 123 WHERE e.colName = 'JoeJoe'", List(), Static) } "simple with schemaMeta with extra columns and update meta fully lifted with filter lift" in { - inline def q = quote { query[Person].filter(e => e.name == lift("JoeJoe")).updateValue(lift(Person("Joe", 123))) } - inline def a = quote { query[Person].filter(e => e.name == lift("JoeJoe")).update(_.name -> "Joe", _.age -> 123) } + inline def q = quote { + query[Person].filter(e => e.name == lift("JoeJoe")).updateValue(lift(Person("Joe", 123))) + } + inline def a = quote { + query[Person].filter(e => e.name == lift("JoeJoe")).update(_.name -> "Joe", _.age -> 123) + } inline given personSchema: UpdateMeta[Person] = updateMeta[Person](_.age) - inline given sm: SchemaMeta[Person] = schemaMeta("tblPerson", _.name -> "colName", _.age -> "colAge") - ctx.run(q).triple mustEqual ("UPDATE tblPerson AS e SET colName = ? 
WHERE e.colName = ?", List("Joe", "JoeJoe"), Static) - ctx.run(a).triple mustEqual ("UPDATE tblPerson AS e SET colName = 'Joe', colAge = 123 WHERE e.colName = ?", List("JoeJoe"), Static) + inline given sm: SchemaMeta[Person] = schemaMeta("tblPerson", _.name -> "colName", _.age -> "colAge") + ctx.run(q).triple mustEqual ("UPDATE tblPerson AS e SET colName = ? WHERE e.colName = ?", List( + "Joe", + "JoeJoe" + ), Static) + ctx + .run(a) + .triple mustEqual ("UPDATE tblPerson AS e SET colName = 'Joe', colAge = 123 WHERE e.colName = ?", List( + "JoeJoe" + ), Static) } "simple with schemaMeta with extra columns and update meta filter lift - included column" in { - inline def q = quote { query[Person].filter(e => e.name == lift("JoeJoe")).updateValue(Person(lift("Joe"), 123)) } - inline def a = quote { query[Person].filter(e => e.name == lift("JoeJoe")).update(_.name -> lift("Joe"), _.age -> 123) } + inline def q = quote { + query[Person].filter(e => e.name == lift("JoeJoe")).updateValue(Person(lift("Joe"), 123)) + } + inline def a = quote { + query[Person].filter(e => e.name == lift("JoeJoe")).update(_.name -> lift("Joe"), _.age -> 123) + } inline given personSchema: UpdateMeta[Person] = updateMeta[Person](_.age) - inline given sm: SchemaMeta[Person] = schemaMeta("tblPerson", _.name -> "colName", _.age -> "colAge") - ctx.run(q).triple mustEqual ("UPDATE tblPerson AS e SET colName = ? WHERE e.colName = ?", List("Joe", "JoeJoe"), Static) - ctx.run(a).triple mustEqual ("UPDATE tblPerson AS e SET colName = ?, colAge = 123 WHERE e.colName = ?", List("Joe", "JoeJoe"), Static) + inline given sm: SchemaMeta[Person] = schemaMeta("tblPerson", _.name -> "colName", _.age -> "colAge") + ctx.run(q).triple mustEqual ("UPDATE tblPerson AS e SET colName = ? WHERE e.colName = ?", List( + "Joe", + "JoeJoe" + ), Static) + ctx.run(a).triple mustEqual ("UPDATE tblPerson AS e SET colName = ?, colAge = 123 WHERE e.colName = ?", List( + "Joe", + "JoeJoe" + ), Static) } "simple with schemaMeta with extra columns and update meta filter lift - excluded column" in { - inline def q = quote { query[Person].filter(e => e.name == lift("JoeJoe")).updateValue(Person("Joe", lift(123))) } - inline def a = quote { query[Person].filter(e => e.name == lift("JoeJoe")).update(_.name -> "Joe", _.age -> lift(123)) } + inline def q = quote { + query[Person].filter(e => e.name == lift("JoeJoe")).updateValue(Person("Joe", lift(123))) + } + inline def a = quote { + query[Person].filter(e => e.name == lift("JoeJoe")).update(_.name -> "Joe", _.age -> lift(123)) + } inline given personSchema: UpdateMeta[Person] = updateMeta[Person](_.age) - inline given sm: SchemaMeta[Person] = schemaMeta("tblPerson", _.name -> "colName", _.age -> "colAge") - ctx.run(q).triple mustEqual ("UPDATE tblPerson AS e SET colName = 'Joe' WHERE e.colName = ?", List("JoeJoe"), Static) - ctx.run(a).triple mustEqual ("UPDATE tblPerson AS e SET colName = 'Joe', colAge = ? WHERE e.colName = ?", List(123, "JoeJoe"), Static) + inline given sm: SchemaMeta[Person] = schemaMeta("tblPerson", _.name -> "colName", _.age -> "colAge") + ctx.run(q).triple mustEqual ("UPDATE tblPerson AS e SET colName = 'Joe' WHERE e.colName = ?", List( + "JoeJoe" + ), Static) + ctx.run(a).triple mustEqual ("UPDATE tblPerson AS e SET colName = 'Joe', colAge = ? 
WHERE e.colName = ?", List( + 123, + "JoeJoe" + ), Static) } "simple with schemaMeta with extra columns and update meta filter lift - filter by excluded column" in { - inline def q = quote { query[Person].filter(e => e.age == lift(123)).updateValue(Person("Joe", lift(123))) } - inline def a = quote { query[Person].filter(e => e.age == lift(123)).update(_.name -> "Joe", _.age -> lift(123)) } + inline def q = quote(query[Person].filter(e => e.age == lift(123)).updateValue(Person("Joe", lift(123)))) + inline def a = quote { + query[Person].filter(e => e.age == lift(123)).update(_.name -> "Joe", _.age -> lift(123)) + } inline given personSchema: UpdateMeta[Person] = updateMeta[Person](_.age) - inline given sm: SchemaMeta[Person] = schemaMeta("tblPerson", _.name -> "colName", _.age -> "colAge") + inline given sm: SchemaMeta[Person] = schemaMeta("tblPerson", _.name -> "colName", _.age -> "colAge") ctx.run(q).triple mustEqual ("UPDATE tblPerson AS e SET colName = 'Joe' WHERE e.colAge = ?", List(123), Static) - ctx.run(a).triple mustEqual ("UPDATE tblPerson AS e SET colName = 'Joe', colAge = ? WHERE e.colAge = ?", List(123, 123), Static) + ctx.run(a).triple mustEqual ("UPDATE tblPerson AS e SET colName = 'Joe', colAge = ? WHERE e.colAge = ?", List( + 123, + 123 + ), Static) } } "simple - runtime" in { - val a = quote { query[Person].update(_.name -> "Joe", _.age -> 123) } - val q = quote { query[Person].updateValue(Person("Joe", 123)) } + val a = quote(query[Person].update(_.name -> "Joe", _.age -> 123)) + val q = quote(query[Person].updateValue(Person("Joe", 123))) ctx.run(a).triple mustEqual ("UPDATE Person SET name = 'Joe', age = 123", List(), Dynamic) ctx.run(q).triple mustEqual ("UPDATE Person SET name = 'Joe', age = 123", List(), Dynamic) } @@ -211,7 +250,7 @@ class InsertAdvancedSpec extends Spec with Inside { } "auto-quote with lift" in { - val result = ctx.run(query[Person]updateValue(Person(lift("Joe"), 123))) + val result = ctx.run(query[Person] updateValue (Person(lift("Joe"), 123))) result.triple mustEqual ( "UPDATE Person SET name = ?, age = 123", List("Joe"), @@ -223,17 +262,17 @@ class InsertAdvancedSpec extends Spec with Inside { // Variation of this with only InsertMeta, as well as both InsertMeta and SchemaMeta (inline and dynamic) "entity insert with dynamic components should work for" - { // "given queries in an outer scope" - { - inline def a = quote { query[Person].insert(_.name -> "Joe", _.age -> 123) } // Insert "assignment form" - inline def q = quote { query[Person].insertValue(Person("Joe", 123)) } // Insert entity form - val adyn = quote { query[Person].insert(_.name -> "Joe", _.age -> 123) } // Dynamic Insert "assignment form" - val qdyn = quote { query[Person].insertValue(Person("Joe", 123)) } // Dynamic Insert entity form + inline def a = quote(query[Person].insert(_.name -> "Joe", _.age -> 123)) // Insert "assignment form" + inline def q = quote(query[Person].insertValue(Person("Joe", 123))) // Insert entity form + val adyn = quote(query[Person].insert(_.name -> "Joe", _.age -> 123)) // Dynamic Insert "assignment form" + val qdyn = quote(query[Person].insertValue(Person("Joe", 123))) // Dynamic Insert entity form "dynamic schema makes whole query dynamic - it will plug into runtime queries post-facto" in { given sm: SchemaMeta[Person] = schemaMeta("tblPerson", _.name -> "colName") // For static queries `insert` macro is only being evaluated right here so `given sm` will change names - ctx.run(q).triple mustEqual ("INSERT INTO tblPerson (colName,age) VALUES ('Joe', 
123)", List(), Dynamic) - ctx.run(a).triple mustEqual ("INSERT INTO tblPerson (colName,age) VALUES ('Joe', 123)", List(), Dynamic) + ctx.run(q).triple mustEqual ("INSERT INTO tblPerson (colName,age) VALUES ('Joe', 123)", List(), Dynamic) + ctx.run(a).triple mustEqual ("INSERT INTO tblPerson (colName,age) VALUES ('Joe', 123)", List(), Dynamic) // For dynamic queries `insert` macro is already evaluated therefore `given sm` will not change the column names ctx.run(qdyn).triple mustEqual ("INSERT INTO Person (name,age) VALUES ('Joe', 123)", List(), Dynamic) @@ -244,17 +283,17 @@ class InsertAdvancedSpec extends Spec with Inside { "(update) entity insert with dynamic components should work for" - { "given queries in an outer scope" - { - inline def a = quote { query[Person].update(_.name -> "Joe", _.age -> 123) } // Insert "assignment form" - inline def q = quote { query[Person].updateValue(Person("Joe", 123)) } // Insert entity form - val adyn = quote { query[Person].update(_.name -> "Joe", _.age -> 123) } // Dynamic Insert "assignment form" - val qdyn = quote { query[Person].updateValue(Person("Joe", 123)) } // Dynamic Insert entity form + inline def a = quote(query[Person].update(_.name -> "Joe", _.age -> 123)) // Insert "assignment form" + inline def q = quote(query[Person].updateValue(Person("Joe", 123))) // Insert entity form + val adyn = quote(query[Person].update(_.name -> "Joe", _.age -> 123)) // Dynamic Insert "assignment form" + val qdyn = quote(query[Person].updateValue(Person("Joe", 123))) // Dynamic Insert entity form "dynamic schema makes whole query dynamic - it will plug into runtime queries post-facto" in { given sm: SchemaMeta[Person] = schemaMeta("tblPerson", _.name -> "colName") // For static queries `insert` macro is only being evaluated right here so `given sm` will change names - ctx.run(q).triple mustEqual ("UPDATE tblPerson SET colName = 'Joe', age = 123", List(), Dynamic) - ctx.run(a).triple mustEqual ("UPDATE tblPerson SET colName = 'Joe', age = 123", List(), Dynamic) + ctx.run(q).triple mustEqual ("UPDATE tblPerson SET colName = 'Joe', age = 123", List(), Dynamic) + ctx.run(a).triple mustEqual ("UPDATE tblPerson SET colName = 'Joe', age = 123", List(), Dynamic) // For dynamic queries `insert` macro is already evaluated therefore `given sm` will not change the column names ctx.run(qdyn).triple mustEqual ("UPDATE Person SET name = 'Joe', age = 123", List(), Dynamic) @@ -266,15 +305,15 @@ class InsertAdvancedSpec extends Spec with Inside { // TODO Variation of this with only InsertMeta, and well as both InsertMeta and SchemaMeta (inline, and dynamic) "given queries in an outer scope - with the given already there" - { given sm: SchemaMeta[Person] = schemaMeta("tblPerson", _.name -> "colName") - inline def a = quote { query[Person].insert(_.name -> "Joe", _.age -> 123) } // Insert "assignment form" - inline def q = quote { query[Person].insertValue(Person("Joe", 123)) } // Insert entity form - val adyn = quote { query[Person].insert(_.name -> "Joe", _.age -> 123) } // Dynamic Insert "assignment form" - val qdyn = quote { query[Person].insertValue(Person("Joe", 123)) } // Dynamic Insert entity form + inline def a = quote(query[Person].insert(_.name -> "Joe", _.age -> 123)) // Insert "assignment form" + inline def q = quote(query[Person].insertValue(Person("Joe", 123))) // Insert entity form + val adyn = quote(query[Person].insert(_.name -> "Joe", _.age -> 123)) // Dynamic Insert "assignment form" + val qdyn = quote(query[Person].insertValue(Person("Joe", 123))) // 
Dynamic Insert entity form // Since schema meta has been plugged in already, all behaviors are the same "dynamic schema plugs in and makes all queries dynamic" in { - ctx.run(q).triple mustEqual ("INSERT INTO tblPerson (colName,age) VALUES ('Joe', 123)", List(), Dynamic) - ctx.run(a).triple mustEqual ("INSERT INTO tblPerson (colName,age) VALUES ('Joe', 123)", List(), Dynamic) + ctx.run(q).triple mustEqual ("INSERT INTO tblPerson (colName,age) VALUES ('Joe', 123)", List(), Dynamic) + ctx.run(a).triple mustEqual ("INSERT INTO tblPerson (colName,age) VALUES ('Joe', 123)", List(), Dynamic) ctx.run(qdyn).triple mustEqual ("INSERT INTO tblPerson (colName,age) VALUES ('Joe', 123)", List(), Dynamic) ctx.run(adyn).triple mustEqual ("INSERT INTO tblPerson (colName,age) VALUES ('Joe', 123)", List(), Dynamic) } @@ -283,17 +322,17 @@ class InsertAdvancedSpec extends Spec with Inside { // TODO Variation of this with only InsertMeta, as well as both InsertMeta and SchemaMeta (inline, and dynamic) "(update) given queries in an outer scope - with the given already there" - { given sm: SchemaMeta[Person] = schemaMeta("tblPerson", _.name -> "colName") - inline def a = quote { query[Person].update(_.name -> "Joe", _.age -> 123) } // Insert "assignment form" - inline def q = quote { query[Person].updateValue(Person("Joe", 123)) } // Insert entity form - val adyn = quote { query[Person].update(_.name -> "Joe", _.age -> 123) } // Dynamic Insert "assignment form" - val qdyn = quote { query[Person].updateValue(Person("Joe", 123)) } // Dynamic Insert entity form + inline def a = quote(query[Person].update(_.name -> "Joe", _.age -> 123)) // Insert "assignment form" + inline def q = quote(query[Person].updateValue(Person("Joe", 123))) // Insert entity form + val adyn = quote(query[Person].update(_.name -> "Joe", _.age -> 123)) // Dynamic Insert "assignment form" + val qdyn = quote(query[Person].updateValue(Person("Joe", 123))) // Dynamic Insert entity form // Since schema meta has been plugged in already, all behaviors are the same "dynamic schema plugs in and makes all queries dynamic" in { - ctx.run(q).triple mustEqual ("UPDATE tblPerson SET colName = 'Joe', age = 123", List(), Dynamic) - ctx.run(a).triple mustEqual ("UPDATE tblPerson SET colName = 'Joe', age = 123", List(), Dynamic) + ctx.run(q).triple mustEqual ("UPDATE tblPerson SET colName = 'Joe', age = 123", List(), Dynamic) + ctx.run(a).triple mustEqual ("UPDATE tblPerson SET colName = 'Joe', age = 123", List(), Dynamic) ctx.run(qdyn).triple mustEqual ("UPDATE tblPerson SET colName = 'Joe', age = 123", List(), Dynamic) ctx.run(adyn).triple mustEqual ("UPDATE tblPerson SET colName = 'Joe', age = 123", List(), Dynamic) } } -} \ No newline at end of file +} diff --git a/quill-sql-tests/src/test/scala/io/getquill/TestContexts.scala b/quill-sql-tests/src/test/scala/io/getquill/TestContexts.scala index 5a44a251e..d1ee1f6e7 100644 --- a/quill-sql-tests/src/test/scala/io/getquill/TestContexts.scala +++ b/quill-sql-tests/src/test/scala/io/getquill/TestContexts.scala @@ -1,4 +1,4 @@ package io.getquill -object testContext extends TestMirrorContextTemplate(MirrorIdiom, Literal) with TestEntities +object testContext extends TestMirrorContextTemplate(MirrorIdiom, Literal) with TestEntities object testContextSnake extends TestMirrorContextTemplate[MirrorSqlDialect, SnakeCase](MirrorSqlDialect, SnakeCase) diff --git a/quill-sql-tests/src/test/scala/io/getquill/TestMirrorContextTemplate.scala b/quill-sql-tests/src/test/scala/io/getquill/TestMirrorContextTemplate.scala 
index dea5c1a67..01406cc6f 100644 --- a/quill-sql-tests/src/test/scala/io/getquill/TestMirrorContextTemplate.scala +++ b/quill-sql-tests/src/test/scala/io/getquill/TestMirrorContextTemplate.scala @@ -1,6 +1,6 @@ package io.getquill -import io.getquill.context.{ CanReturnField, CanReturnMultiField, CannotReturn } +import io.getquill.context.{CanReturnField, CanReturnMultiField, CannotReturn} import io.getquill.idiom.Idiom import io.getquill.norm.EqualityBehavior import io.getquill.norm.EqualityBehavior.NonAnsiEquality @@ -8,7 +8,8 @@ import io.getquill.context.sql.TestEncoders import io.getquill.context.sql.TestDecoders class TestMirrorContextTemplate[+Dialect <: Idiom, +Naming <: NamingStrategy](dialect: Dialect, naming: Naming) - extends MirrorContext[Dialect, Naming](dialect, naming) with TestEntities { + extends MirrorContext[Dialect, Naming](dialect, naming) + with TestEntities { def withDialect[I <: Idiom](dialect: I)(f: TestMirrorContextTemplate[I, Naming] => Any): Unit = { val ctx = new TestMirrorContextTemplate[I, Naming](dialect, naming) @@ -18,13 +19,13 @@ class TestMirrorContextTemplate[+Dialect <: Idiom, +Naming <: NamingStrategy](di } // Mirror idiom supporting only single-field returning clauses -trait MirrorIdiomReturningSingle extends MirrorIdiomBase with CanReturnField +trait MirrorIdiomReturningSingle extends MirrorIdiomBase with CanReturnField object MirrorIdiomReturningSingle extends MirrorIdiomReturningSingle // Mirror idiom supporting only multi-field returning clauses -trait MirrorIdiomReturningMulti extends MirrorIdiomBase with CanReturnMultiField +trait MirrorIdiomReturningMulti extends MirrorIdiomBase with CanReturnMultiField object MirrorIdiomReturningMulti extends MirrorIdiomReturningMulti // Mirror idiom not supporting any returns -trait MirrorIdiomReturningUnsupported extends MirrorIdiomBase with CannotReturn +trait MirrorIdiomReturningUnsupported extends MirrorIdiomBase with CannotReturn object MirrorIdiomReturningUnsupported extends MirrorIdiomReturningUnsupported diff --git a/quill-sql-tests/src/test/scala/io/getquill/customparser/CustomParseTest.scala b/quill-sql-tests/src/test/scala/io/getquill/customparser/CustomParseTest.scala index 9209668c6..13b75de42 100644 --- a/quill-sql-tests/src/test/scala/io/getquill/customparser/CustomParseTest.scala +++ b/quill-sql-tests/src/test/scala/io/getquill/customparser/CustomParseTest.scala @@ -5,7 +5,7 @@ import io.getquill.ast._ import io.getquill.Quoted import io.getquill.customparser.CustomParser import io.getquill.customparser.CustomOps -import io.getquill.{ defaultParser => _, _ } +import io.getquill.{defaultParser => _, _} import org.scalatest._ @@ -17,8 +17,8 @@ class CustomParseTest extends Spec with Inside { given myParser: CustomParser.type = CustomParser import CustomOps._ case class Person(name: String, age: Int) - inline def q = quote { query[Person].map(p => p.age ** 2) } + inline def q = quote(query[Person].map(p => p.age ** 2)) ctx.run(q).string mustEqual "SELECT power(p.age ,2) FROM Person p" } -} \ No newline at end of file +} diff --git a/quill-sql-tests/src/test/scala/io/getquill/examples/MiniExample_LiftByKeys.scala b/quill-sql-tests/src/test/scala/io/getquill/examples/MiniExample_LiftByKeys.scala index 78e73213f..5110251fa 100644 --- a/quill-sql-tests/src/test/scala/io/getquill/examples/MiniExample_LiftByKeys.scala +++ b/quill-sql-tests/src/test/scala/io/getquill/examples/MiniExample_LiftByKeys.scala @@ -18,13 +18,11 @@ object MiniExample_LiftByKeys { def regularMapProc() = { inline def q = quote 
{ - query[Person].filter(p => - MapFlicer[Person, PrepareRow, Session](p, values) - ) + query[Person].filter(p => MapFlicer[Person, PrepareRow, Session](p, values)) } val r = run(q) - println( r.string ) - println( r.prepareRow.data.toList) + println(r.string) + println(r.prepareRow.data.toList) } def extensionMapProc() = { @@ -32,8 +30,8 @@ object MiniExample_LiftByKeys { query[Person].filterByKeys(values) } val r = run(q) - println( r.string ) - println( r.prepareRow.data.toList) + println(r.string) + println(r.prepareRow.data.toList) } def extensionColumnsProc() = { @@ -42,7 +40,7 @@ object MiniExample_LiftByKeys { query[Person].filterColumns(columns) } // println(q) - //val r = run(q) //hello + // val r = run(q) //hello // println( r.string ) // println( r.prepareRow.data.toList) } @@ -54,7 +52,7 @@ object MiniExample_LiftByKeys { ( p.firstName = [ values.getOrElse("firstName",null) ] OR [ values.getOrElse("firstName",null) == null ] ) AND ( p.lastName = [ values.getOrElse("lastName",null) ] OR [ values.getOrElse("lastName",null) == null ] ) AND ( p.age = [ values.getOrElse("age",null) ] OR [ values.getOrElse("age",null) == null ] ) AND true - */ + */ def main(args: Array[String]): Unit = { regularMapProc() diff --git a/quill-sql-tests/src/test/scala/io/getquill/examples/TypeclassExample_Show.scala b/quill-sql-tests/src/test/scala/io/getquill/examples/TypeclassExample_Show.scala index eaff00416..256572a60 100644 --- a/quill-sql-tests/src/test/scala/io/getquill/examples/TypeclassExample_Show.scala +++ b/quill-sql-tests/src/test/scala/io/getquill/examples/TypeclassExample_Show.scala @@ -1,12 +1,10 @@ package io.getquill.examples - import scala.language.implicitConversions import io.getquill._ object TypeclassExample_Show { - case class Person(id: Int, name: String, age: Int) val ctx = new MirrorContext(MirrorSqlDialect, Literal) import ctx._ @@ -23,15 +21,13 @@ object TypeclassExample_Show { inline def show(inline t: Int): String = t.toString + "-suffix" } - inline def show[T](inline element: T)(using inline shower: Show[T]): String = { + inline def show[T](inline element: T)(using inline shower: Show[T]): String = shower.show(element) - } inline def q = quote { query[Person].map(p => show(p.name) + show(p.age)) } - println( run(q) ) + println(run(q)) - def main(args: Array[String]): Unit = { - } + def main(args: Array[String]): Unit = {} } diff --git a/quill-sql-tests/src/test/scala/io/getquill/examples/TypeclassUsecase_Typeclass.scala b/quill-sql-tests/src/test/scala/io/getquill/examples/TypeclassUsecase_Typeclass.scala index 5aa143588..a20921366 100644 --- a/quill-sql-tests/src/test/scala/io/getquill/examples/TypeclassUsecase_Typeclass.scala +++ b/quill-sql-tests/src/test/scala/io/getquill/examples/TypeclassUsecase_Typeclass.scala @@ -1,12 +1,11 @@ package io.getquill.examples - import scala.language.implicitConversions import io.getquill._ import scala.compiletime.{erasedValue, summonFrom, constValue} object TypeclassUsecase_Typeclass { - + case class Address(street: String, zip: Int) extends Embedded val ctx = new MirrorContext(MirrorSqlDialect, Literal) import ctx._ @@ -15,52 +14,52 @@ object TypeclassUsecase_Typeclass { case class Master(key: Int, lastCheck: Int, state: String) case class Worker(shard: Int, lastTime: Int, reply: String) - trait GroupKey[T, G] { inline def apply(inline t: T): G } trait EarlierThan[T] { inline def apply(inline a: T, inline b: T): Boolean } - + inline given GroupKey[Node, Int] with { inline def apply(inline t: Node): Int = t.id } inline given 
GroupKey[Master, Int] with { inline def apply(inline t: Master): Int = t.key } - inline given GroupKey[Worker, Int] with { + inline given GroupKey[Worker, Int] with { inline def apply(inline t: Worker): Int = t.shard } - inline given EarlierThan[Node] with { + inline given EarlierThan[Node] with { inline def apply(inline a: Node, inline b: Node) = a.timestamp < b.timestamp } - inline given EarlierThan[Master] with { + inline given EarlierThan[Master] with { inline def apply(inline a: Master, inline b: Master) = a.lastCheck < b.lastCheck } - inline given EarlierThan[Worker] with { + inline given EarlierThan[Worker] with { inline def apply(inline a: Worker, inline b: Worker) = a.lastTime < b.lastTime } def main(args: Array[String]): Unit = { - - inline def latestStatus[T, G](inline q: Query[T])(using inline groupKey: GroupKey[T, G], inline earlierThan: EarlierThan[T]) = + + inline def latestStatus[T, G]( + inline q: Query[T] + )(using inline groupKey: GroupKey[T, G], inline earlierThan: EarlierThan[T]) = q.leftJoin(q) - .on((a, b) => - groupKey(b) == groupKey(a) && //hello - earlierThan(b, a) - ) - .filter((a, b) => - b.map(b => groupKey(b)).isEmpty) - .map((a, b) => a) + .on((a, b) => + groupKey(b) == groupKey(a) && // hello + earlierThan(b, a) + ) + .filter((a, b) => b.map(b => groupKey(b)).isEmpty) + .map((a, b) => a) - inline def nodesLatest = quote { latestStatus(query[Node]) } - inline def mastersLatest = quote { latestStatus(query[Master]) } - inline def workersLatest = quote { latestStatus(query[Worker]) } + inline def nodesLatest = quote(latestStatus(query[Node])) + inline def mastersLatest = quote(latestStatus(query[Master])) + inline def workersLatest = quote(latestStatus(query[Worker])) - println( run(nodesLatest).string ) - println( run(mastersLatest).string ) - println( run(workersLatest).string ) + println(run(nodesLatest).string) + println(run(mastersLatest).string) + println(run(workersLatest).string) } } diff --git a/quill-sql-tests/src/test/scala/io/getquill/examples/TypelevelUsecase.scala b/quill-sql-tests/src/test/scala/io/getquill/examples/TypelevelUsecase.scala index 1fd840e9c..74d158f62 100644 --- a/quill-sql-tests/src/test/scala/io/getquill/examples/TypelevelUsecase.scala +++ b/quill-sql-tests/src/test/scala/io/getquill/examples/TypelevelUsecase.scala @@ -5,8 +5,7 @@ import io.getquill._ object TypelevelUsecase { - - case class Address(street: String, zip: Int, fk: Int) extends Embedded //helloooo + case class Address(street: String, zip: Int, fk: Int) extends Embedded // helloooo val ctx = new MirrorContext(MirrorSqlDialect, Literal) import ctx._ @@ -20,38 +19,38 @@ object TypelevelUsecase { type Out inline def get: Out } - + inline given Path[User, Role] with { type Out = Query[(User, Role)] inline def get: Query[(User, Role)] = for { - s <- query[User] + s <- query[User] sr <- query[UserToRole].join(sr => sr.userId == s.id) - r <- query[Role].join(r => r.id == sr.roleId) + r <- query[Role].join(r => r.id == sr.roleId) } yield (s, r) } - + inline given Path[User, Permission] with { type Out = Query[(User, Role, Permission)] inline def get: Query[(User, Role, Permission)] = for { - s <- query[User] + s <- query[User] so <- query[UserToRole].join(so => so.userId == s.id) - r <- query[Role].join(r => r.id == so.roleId) + r <- query[Role].join(r => r.id == so.roleId) rp <- query[RoleToPermission].join(rp => rp.roleId == r.id) - p <- query[Permission].join(p => p.id == rp.roleId) + p <- query[Permission].join(p => p.id == rp.roleId) } yield (s, r, p) } - + inline def 
path[F, T](using path: Path[F, T]): path.Out = path.get - - inline def q1 = quote { path[User, Role].filter(so => so._2.name == "Drinker") } - //inline def q1 = quote { path[User, Permission].filter(urp => urp._2.name == "GuiUser" && urp._1.name == "Joe") } - //inline def q1 = quote { path[User, Permission].filter { case (u,r,p) => u.name == "GuiUser" && r.name == "Joe" } } + inline def q1 = quote(path[User, Role].filter(so => so._2.name == "Drinker")) + + // inline def q1 = quote { path[User, Permission].filter(urp => urp._2.name == "GuiUser" && urp._1.name == "Joe") } + // inline def q1 = quote { path[User, Permission].filter { case (u,r,p) => u.name == "GuiUser" && r.name == "Joe" } } - inline def q2 = quote { path[User, Permission].filter((u,r,p) => u.name == "GuiUser" && r.name == "Joe") } - println( run(q2).string(true) ) + inline def q2 = quote(path[User, Permission].filter((u, r, p) => u.name == "GuiUser" && r.name == "Joe")) + println(run(q2).string(true)) // Made copies of this query in order to profile for performance // println( run(q2).string(true) ) @@ -141,7 +140,7 @@ object TypelevelUsecase { // println( run(q2).string(true) ) def main(args: Array[String]): Unit = { - println( run(q1) ) - println( run(q2).string(true) ) + println(run(q1)) + println(run(q2).string(true)) } } diff --git a/quill-sql-tests/src/test/scala/io/getquill/ported/AggregationSpec.scala b/quill-sql-tests/src/test/scala/io/getquill/ported/AggregationSpec.scala index 2bd806e31..d1f3ae13e 100644 --- a/quill-sql-tests/src/test/scala/io/getquill/ported/AggregationSpec.scala +++ b/quill-sql-tests/src/test/scala/io/getquill/ported/AggregationSpec.scala @@ -1,10 +1,9 @@ package io.getquill.context.sql -import io.getquill.norm.{ DisablePhase, OptionalPhase } +import io.getquill.norm.{DisablePhase, OptionalPhase} import io.getquill._ import io.getquill.norm.ConfigList._ - class AggregationSpec extends Spec { // case class Person(id: Int, name: String, age: Int) case class PersonOpt(name: Option[String], age: Int) @@ -28,16 +27,20 @@ class AggregationSpec extends Spec { // // => SELECT p.age + 1 FROM (SELECT x.age FROM Person x) AS p WHERE (p.age + 1) = 123 // Instead it should remain as the former query "simple operation should not propagate from nested" in { - ctx.run { query[Person].map(p => p.age + 1).nested.filter(p => p == 123) }.string mustEqual "SELECT p.x FROM (SELECT p.age + 1 AS x FROM Person p) AS p WHERE p.x = 123" + ctx.run { + query[Person].map(p => p.age + 1).nested.filter(p => p == 123) + }.string mustEqual "SELECT p.x FROM (SELECT p.age + 1 AS x FROM Person p) AS p WHERE p.x = 123" } "aggregation functions should" - { "work in a map clause that is last in a query" - { - "max" in { ctx.run { query[Person].map(p => max(p.name)) }.string mustEqual "SELECT MAX(p.name) FROM Person p" } - "min" in { ctx.run { query[Person].map(p => min(p.name)) }.string mustEqual "SELECT MIN(p.name) FROM Person p" } - "count" in { ctx.run { query[Person].map(p => count(p.name)) }.string mustEqual "SELECT COUNT(p.name) FROM Person p" } - "avg" in { ctx.run { query[Person].map(p => avg(p.age)) }.string mustEqual "SELECT AVG(p.age) FROM Person p" } - "sum" in { ctx.run { query[Person].map(p => sum(p.age)) }.string mustEqual "SELECT SUM(p.age) FROM Person p" } + "max" in { ctx.run(query[Person].map(p => max(p.name))).string mustEqual "SELECT MAX(p.name) FROM Person p" } + "min" in { ctx.run(query[Person].map(p => min(p.name))).string mustEqual "SELECT MIN(p.name) FROM Person p" } + "count" in { 
ctx.run(query[Person].map(p => count(p.name))).string mustEqual "SELECT COUNT(p.name) FROM Person p" + } + "avg" in { ctx.run(query[Person].map(p => avg(p.age))).string mustEqual "SELECT AVG(p.age) FROM Person p" } + "sum" in { ctx.run(query[Person].map(p => sum(p.age))).string mustEqual "SELECT SUM(p.age) FROM Person p" } } "work correctly with a filter clause that is BEFORE the aggregation" in { @@ -80,45 +83,79 @@ class AggregationSpec extends Spec { // } "work externally with optional mapping" - { - "max" in { ctx.run { query[PersonOpt].map(p => max(p.name)) }.string mustEqual "SELECT MAX(p.name) FROM PersonOpt p" } - "min" in { ctx.run { query[PersonOpt].map(p => min(p.name)) }.string mustEqual "SELECT MIN(p.name) FROM PersonOpt p" } - "count" in { ctx.run { query[PersonOpt].map(p => count(p.name)) }.string mustEqual "SELECT COUNT(p.name) FROM PersonOpt p" } - "avg" in { ctx.run { query[PersonOpt].map(p => avg(p.age)) }.string mustEqual "SELECT AVG(p.age) FROM PersonOpt p" } - "sum" in { ctx.run { query[PersonOpt].map(p => sum(p.age)) }.string mustEqual "SELECT SUM(p.age) FROM PersonOpt p" } + "max" in { + ctx.run(query[PersonOpt].map(p => max(p.name))).string mustEqual "SELECT MAX(p.name) FROM PersonOpt p" + } + "min" in { + ctx.run(query[PersonOpt].map(p => min(p.name))).string mustEqual "SELECT MIN(p.name) FROM PersonOpt p" + } + "count" in { + ctx.run(query[PersonOpt].map(p => count(p.name))).string mustEqual "SELECT COUNT(p.name) FROM PersonOpt p" + } + "avg" in { + ctx.run(query[PersonOpt].map(p => avg(p.age))).string mustEqual "SELECT AVG(p.age) FROM PersonOpt p" + } + "sum" in { + ctx.run(query[PersonOpt].map(p => sum(p.age))).string mustEqual "SELECT SUM(p.age) FROM PersonOpt p" + } } } "groupByMap should" - { "work in the simple form" - { - "max" in { ctx.run { query[Person].groupByMap(p => p.id)(p => (p.name, max(p.name))) }.string mustEqual "SELECT p.name AS _1, MAX(p.name) AS _2 FROM Person p GROUP BY p.id" } - "min" in { ctx.run { query[Person].groupByMap(p => p.id)(p => (p.name, min(p.name))) }.string mustEqual "SELECT p.name AS _1, MIN(p.name) AS _2 FROM Person p GROUP BY p.id" } - "count" in { ctx.run { query[Person].groupByMap(p => p.id)(p => (p.name, count(p.name))) }.string mustEqual "SELECT p.name AS _1, COUNT(p.name) AS _2 FROM Person p GROUP BY p.id" } - "avg" in { ctx.run { query[Person].groupByMap(p => p.id)(p => (p.name, avg(p.age))) }.string mustEqual "SELECT p.name AS _1, AVG(p.age) AS _2 FROM Person p GROUP BY p.id" } - "sum" in { ctx.run { query[Person].groupByMap(p => p.id)(p => (p.name, sum(p.age))) }.string mustEqual "SELECT p.name AS _1, SUM(p.age) AS _2 FROM Person p GROUP BY p.id" } + "max" in { + ctx.run { + query[Person].groupByMap(p => p.id)(p => (p.name, max(p.name))) + }.string mustEqual "SELECT p.name AS _1, MAX(p.name) AS _2 FROM Person p GROUP BY p.id" + } + "min" in { + ctx.run { + query[Person].groupByMap(p => p.id)(p => (p.name, min(p.name))) + }.string mustEqual "SELECT p.name AS _1, MIN(p.name) AS _2 FROM Person p GROUP BY p.id" + } + "count" in { + ctx.run { + query[Person].groupByMap(p => p.id)(p => (p.name, count(p.name))) + }.string mustEqual "SELECT p.name AS _1, COUNT(p.name) AS _2 FROM Person p GROUP BY p.id" + } + "avg" in { + ctx.run { + query[Person].groupByMap(p => p.id)(p => (p.name, avg(p.age))) + }.string mustEqual "SELECT p.name AS _1, AVG(p.age) AS _2 FROM Person p GROUP BY p.id" + } + "sum" in { + ctx.run { + query[Person].groupByMap(p => p.id)(p => (p.name, sum(p.age))) + }.string mustEqual "SELECT p.name AS _1, 
SUM(p.age) AS _2 FROM Person p GROUP BY p.id" + } } "work with multiple aggregators" in { - ctx.run { query[Person].groupByMap(p => p.id)(p => (p.name, max(p.name), avg(p.age))) }.string mustEqual + ctx.run(query[Person].groupByMap(p => p.id)(p => (p.name, max(p.name), avg(p.age)))).string mustEqual "SELECT p.name AS _1, MAX(p.name) AS _2, AVG(p.age) AS _3 FROM Person p GROUP BY p.id" } "work with a filter clause after" in { - ctx.run { query[Person].groupByMap(p => p.id)(p => (p.name, max(p.name))).filter(p => p._2 == "Joe") }.string mustEqual + ctx.run { + query[Person].groupByMap(p => p.id)(p => (p.name, max(p.name))).filter(p => p._2 == "Joe") + }.string mustEqual + "SELECT p._1, p._2 FROM (SELECT p.name AS _1, MAX(p.name) AS _2 FROM Person p GROUP BY p.id) AS p WHERE p._2 = 'Joe'" } "work with a filter clause before" in { - ctx.run { query[Person].filter(p => p.name == "Joe").groupByMap(p => p.id)(p => (p.name, max(p.name))) }.string mustEqual + ctx.run { + query[Person].filter(p => p.name == "Joe").groupByMap(p => p.id)(p => (p.name, max(p.name))) + }.string mustEqual + "SELECT p.name AS _1, MAX(p.name) AS _2 FROM Person p WHERE p.name = 'Joe' GROUP BY p.id" } "work with a groupByMap(to-leaf).filter" in { - ctx.run { query[Person].groupByMap(p => p.age)(p => max(p.age)).filter(a => a > 1000) }.string mustEqual + ctx.run(query[Person].groupByMap(p => p.age)(p => max(p.age)).filter(a => a > 1000)).string mustEqual "SELECT p.x FROM (SELECT MAX(p.age) AS x FROM Person p GROUP BY p.age) AS p WHERE p.x > 1000" } "work with a map(to-leaf).groupByMap.filter" in { - ctx.run { query[Person].map(p => p.age).groupByMap(p => p)(p => max(p)).filter(a => a > 1000) }.string mustEqual + ctx.run(query[Person].map(p => p.age).groupByMap(p => p)(p => max(p)).filter(a => a > 1000)).string mustEqual "SELECT p.x FROM (SELECT MAX(p.age) AS x FROM Person p GROUP BY p.age) AS p WHERE p.x > 1000" } @@ -126,7 +163,7 @@ class AggregationSpec extends Spec { // // Infix has a special case already so we want to not use that specifically. 
"work with a map(to-leaf).groupByMap.filter - no ApplyMap" in { implicit val d = new DisablePhase { override type Phase = OptionalPhase.ApplyMap :: HNil } - ctx.run { query[Person].map(p => p.age).groupByMap(p => p)(p => max(p)).filter(a => a > 1000) }.string mustEqual + ctx.run(query[Person].map(p => p.age).groupByMap(p => p)(p => max(p)).filter(a => a > 1000)).string mustEqual "SELECT p.x FROM (SELECT MAX(p.age) AS x FROM (SELECT p.age FROM Person p) AS p GROUP BY p.age) AS p WHERE p.x > 1000" } @@ -134,7 +171,10 @@ class AggregationSpec extends Spec { // "work with map(product).filter.groupByMap.filter" in { ctx.run { - query[Person].map(p => NameAge(p.name, p.age)).filter(t => t.age == 123).groupByMap(t => t.name)(t => (t.name, max(t.age))) + query[Person] + .map(p => NameAge(p.name, p.age)) + .filter(t => t.age == 123) + .groupByMap(t => t.name)(t => (t.name, max(t.age))) }.string mustEqual "SELECT p.name AS _1, MAX(p.age) AS _2 FROM Person p WHERE p.age = 123 GROUP BY p.name" } @@ -142,7 +182,10 @@ class AggregationSpec extends Spec { // "work with map(product).filter.groupByMap.filter - no ApplyMap" in { implicit val d = new DisablePhase { override type Phase = OptionalPhase.ApplyMap :: HNil } ctx.run { - query[Person].map(p => NameAge(p.name, p.age)).filter(t => t.age == 123).groupByMap(t => t.name)(t => (t.name, max(t.age))) + query[Person] + .map(p => NameAge(p.name, p.age)) + .filter(t => t.age == 123) + .groupByMap(t => t.name)(t => (t.name, max(t.age))) }.string mustEqual "SELECT p.name AS _1, MAX(p.age) AS _2 FROM (SELECT p.name, p.age FROM Person p) AS p WHERE p.age = 123 GROUP BY p.name" } @@ -156,7 +199,9 @@ class AggregationSpec extends Spec { // ctx.run { for { a <- query[Address] - p <- query[Person].groupByMap(p => p.name == "Joe")(p => PersonInfo(p.id, max(p.age))).leftJoin(p => p.id == a.owner) + p <- query[Person] + .groupByMap(p => p.name == "Joe")(p => PersonInfo(p.id, max(p.age))) + .leftJoin(p => p.id == a.owner) f <- query[Furniture].leftJoin(f => f.owner === p.map(_.id) && f.location == a.id) } yield (a, p, f) }.string mustEqual diff --git a/quill-sql-tests/src/test/scala/io/getquill/ported/ContextMacroSpec.scala b/quill-sql-tests/src/test/scala/io/getquill/ported/ContextMacroSpec.scala index d1369e7d0..033476227 100644 --- a/quill-sql-tests/src/test/scala/io/getquill/ported/ContextMacroSpec.scala +++ b/quill-sql-tests/src/test/scala/io/getquill/ported/ContextMacroSpec.scala @@ -8,7 +8,7 @@ import io.getquill.context.mirror.Row import io.getquill.idiom.Idiom import io.getquill.context.ExecutionType -case class ValueClass(value: Int) extends AnyVal +case class ValueClass(value: Int) extends AnyVal case class GenericValueClass[T](value: T) extends AnyVal class ContextMacroSpec extends Spec { @@ -28,8 +28,14 @@ class ContextMacroSpec extends Spec { inline def q = quote { query[Person].insertValue(lift(p)).returning(_.age) } - testContext.run(q).triple mustEqual (("""querySchema("Person").insert(_$V => _$V.name -> ?, _$V => _$V.age -> ?).returning((x1) => x1.age)""", List("Joe", 123), ExecutionType.Static)) - testContext.translate(q) mustEqual """querySchema("Person").insert(_$V => _$V.name -> 'Joe', _$V => _$V.age -> 123).returning((x1) => x1.age)""" + testContext.run(q).triple mustEqual (( + """querySchema("Person").insert(_$V => _$V.name -> ?, _$V => _$V.age -> ?).returning((x1) => x1.age)""", + List("Joe", 123), + ExecutionType.Static + )) + testContext.translate( + q + ) mustEqual """querySchema("Person").insert(_$V => _$V.name -> 'Joe', _$V => _$V.age -> 
123).returning((x1) => x1.age)""" } "single" in { inline def q = quote("123") @@ -40,15 +46,27 @@ class ContextMacroSpec extends Spec { inline def q = quote { liftQuery(List(Person("Joe", 123))).foreach(e => query[Person].insertValue(e)) } - testContext.run(q).triple mustEqual ("""querySchema("Person").insert(_$V => _$V.name -> ?, _$V => _$V.age -> ?)""", List(List("Joe", 123)), ExecutionType.Static) - testContext.translate(q) mustEqual List("""querySchema("Person").insert(_$V => _$V.name -> 'Joe', _$V => _$V.age -> 123)""") + testContext + .run(q) + .triple mustEqual ("""querySchema("Person").insert(_$V => _$V.name -> ?, _$V => _$V.age -> ?)""", List( + List("Joe", 123) + ), ExecutionType.Static) + testContext.translate(q) mustEqual List( + """querySchema("Person").insert(_$V => _$V.name -> 'Joe', _$V => _$V.age -> 123)""" + ) } "batch returning" in { inline def q = quote { liftQuery(List(Person("Joe", 123))).foreach(e => query[Person].insertValue(e).returning(_.age)) } - testContext.run(q).triple mustEqual ("""querySchema("Person").insert(_$V => _$V.name -> ?, _$V => _$V.age -> ?).returning((x2) => x2.age)""", List(List("Joe", 123)), ExecutionType.Static) - testContext.translate(q) mustEqual List("""querySchema("Person").insert(_$V => _$V.name -> 'Joe', _$V => _$V.age -> 123).returning((x2) => x2.age)""") + testContext + .run(q) + .triple mustEqual ("""querySchema("Person").insert(_$V => _$V.name -> ?, _$V => _$V.age -> ?).returning((x2) => x2.age)""", List( + List("Joe", 123) + ), ExecutionType.Static) + testContext.translate(q) mustEqual List( + """querySchema("Person").insert(_$V => _$V.name -> 'Joe', _$V => _$V.age -> 123).returning((x2) => x2.age)""" + ) } "sql" in { inline def q = quote { @@ -398,7 +416,9 @@ class ContextMacroSpec extends Spec { ctx.translate(query[TestEntity]) mustEqual """querySchema("TestEntity")""" } "four" in { - object ctx extends MirrorContext(MirrorIdiom, NamingStrategy(Literal, Escape, UpperCase, SnakeCase)) with TestEntities + object ctx + extends MirrorContext(MirrorIdiom, NamingStrategy(Literal, Escape, UpperCase, SnakeCase)) + with TestEntities import ctx._ ctx.run(query[TestEntity]).string mustEqual """querySchema("TestEntity")""" ctx.translate(query[TestEntity]) mustEqual """querySchema("TestEntity")""" diff --git a/quill-sql-tests/src/test/scala/io/getquill/ported/package-info.scala b/quill-sql-tests/src/test/scala/io/getquill/ported/package-info.scala index a1b3f4ed4..5429f145c 100644 --- a/quill-sql-tests/src/test/scala/io/getquill/ported/package-info.scala +++ b/quill-sql-tests/src/test/scala/io/getquill/ported/package-info.scala @@ -1,9 +1,10 @@ package io.getquill.ported /** - * Tests ported from Scala2-Quill quill-core testing suites. These are usually large files with - * dozens of tests that are being broken up and imported as the functionality is written. - * The tests ported from quill-sql can be found in io.getquill.context.sql. + * Tests ported from Scala2-Quill quill-core testing suites. These are usually + * large files with dozens of tests that are being broken up and imported as the + * functionality is written. The tests ported from quill-sql can be found in + * io.getquill.context.sql. 
*/ object Unused diff --git a/quill-sql/src/main/scala/io/getquill/Dsl.scala b/quill-sql/src/main/scala/io/getquill/Dsl.scala index 0e339c444..4f44a7b20 100644 --- a/quill-sql/src/main/scala/io/getquill/Dsl.scala +++ b/quill-sql/src/main/scala/io/getquill/Dsl.scala @@ -35,7 +35,9 @@ inline def schemaMeta[T](inline entity: String, inline columns: (T => (Any, Stri inline def queryMeta[T, R](inline expand: Quoted[Query[T] => Query[R]])(inline extract: R => T): QueryMeta[T, R] = ${ QueryMetaMacro.embed[T, R]('expand, 'extract) } -/** Automatic implicit ordering DSL for: `query[Person].sortBy(_.field)()` */ +/** + * Automatic implicit ordering DSL for: `query[Person].sortBy(_.field)()` + */ implicit def implicitOrd[T]: Ord[T] = Ord.ascNullsFirst extension (str: String) { @@ -43,16 +45,16 @@ extension (str: String) { } inline def query[T]: EntityQuery[T] = ${ QueryMacro[T] } -inline def select[T]: Query[T] = ${ QueryMacro[T] } +inline def select[T]: Query[T] = ${ QueryMacro[T] } -def max[A](a: A): A = NonQuotedException() -def min[A](a: A): A = NonQuotedException() -def count[A](a: A): A = NonQuotedException() +def max[A](a: A): A = NonQuotedException() +def min[A](a: A): A = NonQuotedException() +def count[A](a: A): A = NonQuotedException() def avg[A](a: A)(implicit n: Numeric[A]): BigDecimal = NonQuotedException() -def sum[A](a: A)(implicit n: Numeric[A]): A = NonQuotedException() +def sum[A](a: A)(implicit n: Numeric[A]): A = NonQuotedException() def avg[A](a: Option[A])(implicit n: Numeric[A]): Option[BigDecimal] = NonQuotedException() -def sum[A](a: Option[A])(implicit n: Numeric[A]): Option[A] = NonQuotedException() +def sum[A](a: Option[A])(implicit n: Numeric[A]): Option[A] = NonQuotedException() extension [T](o: Option[T]) { def filterIfDefined(f: T => Boolean): Boolean = NonQuotedException() @@ -107,6 +109,8 @@ inline implicit def autoQuote[T](inline body: T): Quoted[T] = ${ QuoteMacro[T](' // these functions on the quoted variant of the EntityQuery for the types to infer correctly. // see ActionSpec.scala action->insert->simple, using nested select, etc... 
tests for examples of this extension [T](inline quotedEntity: Quoted[EntityQuery[T]]) { - inline def insert(inline f: (T => (Any, Any)), inline f2: (T => (Any, Any))*): Insert[T] = unquote[EntityQuery[T]](quotedEntity).insert(f, f2: _*) - inline def update(inline f: (T => (Any, Any)), inline f2: (T => (Any, Any))*): Update[T] = unquote[EntityQuery[T]](quotedEntity).update(f, f2: _*) + inline def insert(inline f: (T => (Any, Any)), inline f2: (T => (Any, Any))*): Insert[T] = + unquote[EntityQuery[T]](quotedEntity).insert(f, f2: _*) + inline def update(inline f: (T => (Any, Any)), inline f2: (T => (Any, Any))*): Update[T] = + unquote[EntityQuery[T]](quotedEntity).update(f, f2: _*) } diff --git a/quill-sql/src/main/scala/io/getquill/DslModel.scala b/quill-sql/src/main/scala/io/getquill/DslModel.scala index e392dc32e..43012d4c1 100644 --- a/quill-sql/src/main/scala/io/getquill/DslModel.scala +++ b/quill-sql/src/main/scala/io/getquill/DslModel.scala @@ -20,27 +20,39 @@ sealed trait Unquoteable trait EntityQuery[T] extends EntityQueryModel[T] with Unquoteable { override def withFilter(f: T => Boolean): EntityQuery[T] = NonQuotedException() - override def filter(f: T => Boolean): EntityQuery[T] = NonQuotedException() - override def map[R](f: T => R): EntityQuery[R] = NonQuotedException() + override def filter(f: T => Boolean): EntityQuery[T] = NonQuotedException() + override def map[R](f: T => R): EntityQuery[R] = NonQuotedException() } -class Quoted[+T](val ast: io.getquill.ast.Ast, val lifts: List[Planter[_, _, _]], val runtimeQuotes: List[QuotationVase]) { +class Quoted[+T]( + val ast: io.getquill.ast.Ast, + val lifts: List[Planter[_, _, _]], + val runtimeQuotes: List[QuotationVase] +) { // This is not a case-class because the dynamic API uses (quoted:Quoted[(foo, bar)])._1 etc... which would return quoted.ast // where we want unquote(quoted)._1 to happen instead but the implicit unquote would never happen if quoted // is a case class since the _1 property is available on the object. 
- protected lazy val id = Quoted.QuotedId(ast, lifts, runtimeQuotes) - override def toString = io.getquill.util.Messages.qprint(id).plainText + protected lazy val id = Quoted.QuotedId(ast, lifts, runtimeQuotes) + override def toString = io.getquill.util.Messages.qprint(id).plainText override def hashCode(): Int = id.hashCode override def equals(other: Any): Boolean = other match { case q: Quoted[_] => q.id == this.id case _ => false } - def copy(ast: io.getquill.ast.Ast = this.ast, lifts: List[Planter[_, _, _]] = this.lifts, runtimeQuotes: List[QuotationVase] = this.runtimeQuotes) = + def copy( + ast: io.getquill.ast.Ast = this.ast, + lifts: List[Planter[_, _, _]] = this.lifts, + runtimeQuotes: List[QuotationVase] = this.runtimeQuotes + ) = Quoted(ast, lifts, runtimeQuotes) } object Quoted { - case class QuotedId(val ast: io.getquill.ast.Ast, val lifts: List[Planter[_, _, _]], val runtimeQuotes: List[QuotationVase]) + case class QuotedId( + val ast: io.getquill.ast.Ast, + val lifts: List[Planter[_, _, _]], + val runtimeQuotes: List[QuotationVase] + ) def apply[T](ast: io.getquill.ast.Ast, lifts: List[Planter[_, _, _]], runtimeQuotes: List[QuotationVase]) = new Quoted[T](ast, lifts, runtimeQuotes) def unapply[T](quoted: Quoted[T]) = @@ -81,20 +93,30 @@ object compat { } } -case class InjectableEagerPlanter[T, PrepareRow, Session](inject: _ => T, encoder: GenericEncoder[T, PrepareRow, Session], uid: String) extends Planter[T, PrepareRow, Session] { +case class InjectableEagerPlanter[T, PrepareRow, Session]( + inject: _ => T, + encoder: GenericEncoder[T, PrepareRow, Session], + uid: String +) extends Planter[T, PrepareRow, Session] { // This is the equivalent of InjectableEagerPlanterExpr's 'inject' method only for dynamic batch queries // TODO Try changing to Any => T and see if exceptions happen anywhere - def withInject(element: Any) = EagerPlanter[T, PrepareRow, Session](inject.asInstanceOf[Any => T](element), encoder, uid) + def withInject(element: Any) = + EagerPlanter[T, PrepareRow, Session](inject.asInstanceOf[Any => T](element), encoder, uid) def unquote: T = throw new RuntimeException("Unquotation can only be done from a quoted block.") } -case class EagerListPlanter[T, PrepareRow, Session](values: List[T], encoder: GenericEncoder[T, PrepareRow, Session], uid: String) extends Planter[Query[T], PrepareRow, Session] { +case class EagerListPlanter[T, PrepareRow, Session]( + values: List[T], + encoder: GenericEncoder[T, PrepareRow, Session], + uid: String +) extends Planter[Query[T], PrepareRow, Session] { def unquote: Query[T] = throw new RuntimeException("Unquotation can only be done from a quoted block.") } -case class EagerPlanter[T, PrepareRow, Session](value: T, encoder: GenericEncoder[T, PrepareRow, Session], uid: String) extends Planter[T, PrepareRow, Session] { +case class EagerPlanter[T, PrepareRow, Session](value: T, encoder: GenericEncoder[T, PrepareRow, Session], uid: String) + extends Planter[T, PrepareRow, Session] { def unquote: T = throw new RuntimeException("Unquotation can only be done from a quoted block.") } @@ -105,7 +127,12 @@ case class LazyPlanter[T, PrepareRow, Session](value: T, uid: String) extends Pl } // Equivalent to CaseClassValueLift -case class EagerEntitiesPlanter[T, PrepareRow, Session](value: Iterable[T], uid: String, fieldGetters: List[InjectableEagerPlanter[?, PrepareRow, Session]], fieldClass: ast.CaseClass) extends Planter[Query[T], PrepareRow, Session] { +case class EagerEntitiesPlanter[T, PrepareRow, Session]( + value: Iterable[T], + uid: 
String, + fieldGetters: List[InjectableEagerPlanter[?, PrepareRow, Session]], + fieldClass: ast.CaseClass +) extends Planter[Query[T], PrepareRow, Session] { def unquote: Query[T] = throw new RuntimeException("Unquotation can only be done from a quoted block.") } @@ -145,7 +172,8 @@ case class UpdateMeta[T](val entity: Quoted[T], uid: String) extends QuotationLo // Then ActionMacro will take a MT (i.e. MetaType) generic argument that will control what to summon and what kind of AST // element Ast.Insert or Ast.Update to return (also there should probably be 'Delete' meta type which does not summon a column-excluding meta) -case class QueryMeta[T, R](val entity: Quoted[Query[T] => Query[R]], uid: String, extract: R => T) extends QuotationLot[Query[T] => Query[R]](uid) +case class QueryMeta[T, R](val entity: Quoted[Query[T] => Query[R]], uid: String, extract: R => T) + extends QuotationLot[Query[T] => Query[R]](uid) // TODO Rename to EntityLift // Equivalent to CaseClassValueLift diff --git a/quill-sql/src/main/scala/io/getquill/DynamicDsl.scala b/quill-sql/src/main/scala/io/getquill/DynamicDsl.scala index ceb6fd985..86f3abd11 100644 --- a/quill-sql/src/main/scala/io/getquill/DynamicDsl.scala +++ b/quill-sql/src/main/scala/io/getquill/DynamicDsl.scala @@ -40,15 +40,21 @@ implicit class ToDynamicUpdate[T](q: Quoted[Update[T]]) { } implicit class ToDynamicActionReturning[T, U]( - q: Quoted[ActionReturning[T, U]] + q: Quoted[ActionReturning[T, U]] ) { def dynamic: DynamicActionReturning[T, U] = DynamicActionReturning(q) } -private[getquill] def dyn[T](ast: Ast, lifts: List[Planter[_, _, _]], runtimeQuotes: List[QuotationVase]): DynamicQuery[T] = +private[getquill] def dyn[T]( + ast: Ast, + lifts: List[Planter[_, _, _]], + runtimeQuotes: List[QuotationVase] +): DynamicQuery[T] = DynamicQuery[T](Quoted[Query[T]](ast, lifts, runtimeQuotes)) -private[getquill] def spliceLift[O](o: O, otherLifts: List[Planter[_, _, _]], runtimeQuotes: List[QuotationVase])(implicit enc: GenericEncoder[O, _, _]) = { +private[getquill] def spliceLift[O](o: O, otherLifts: List[Planter[_, _, _]], runtimeQuotes: List[QuotationVase])( + implicit enc: GenericEncoder[O, _, _] +) = { val uid = UUID.randomUUID().toString + "foo" new Quoted[O](ScalarTag(uid, External.Source.Parser), EagerPlanter(o, enc, uid) +: otherLifts, runtimeQuotes) } @@ -68,19 +74,19 @@ inline implicit def dynamicUnquote[T](inline d: DynamicQuery[T]): Query[T] = ${ UnquoteMacro('{ d.q }) } def set[T, U]( - property: Quoted[T] => Quoted[U], - value: Quoted[U] + property: Quoted[T] => Quoted[U], + value: Quoted[U] ): DynamicSet[T, U] = DynamicSetValue(property, value) def setValue[T, U]( - property: Quoted[T] => Quoted[U], - value: U + property: Quoted[T] => Quoted[U], + value: U )(implicit enc: GenericEncoder[U, _, _]): DynamicSet[T, U] = set[T, U](property, spliceLift(value, Nil, Nil)) -def setOpt[T, U](property: Quoted[T] => Quoted[U], value: Option[U])( - implicit enc: GenericEncoder[U, _, _] +def setOpt[T, U](property: Quoted[T] => Quoted[U], value: Option[U])(implicit + enc: GenericEncoder[U, _, _] ): DynamicSet[T, U] = value match { case Some(v) => setValue(property, v) @@ -91,21 +97,21 @@ def set[T, U](property: String, value: Quoted[U]): DynamicSet[T, U] = set((f: Quoted[T]) => Quoted(Property(f.ast, property), f.lifts, f.runtimeQuotes), value) def setValue[T, U]( - property: String, - value: U + property: String, + value: U )(implicit enc: GenericEncoder[U, _, _]): DynamicSet[T, U] = set(property, spliceLift(value, Nil, Nil)) def alias[T]( - 
property: Quoted[T] => Quoted[Any], - name: String + property: Quoted[T] => Quoted[Any], + name: String ): DynamicAlias[T] = DynamicAlias(property, name) -implicit inline def toQuoted[T](inline q: DynamicQuery[T]): Quoted[Query[T]] = q.q +implicit inline def toQuoted[T](inline q: DynamicQuery[T]): Quoted[Query[T]] = q.q implicit inline def toQuoted[T](inline q: DynamicEntityQuery[T]): Quoted[EntityQuery[T]] = q.q -implicit inline def toQuoted[T <: DslAction[_]](inline q: DynamicAction[T]): Quoted[T] = q.q +implicit inline def toQuoted[T <: DslAction[_]](inline q: DynamicAction[T]): Quoted[T] = q.q -inline def dynamicQuery[T]: DynamicEntityQuery[T] = { +inline def dynamicQuery[T]: DynamicEntityQuery[T] = DynamicEntityQuery( Quoted[EntityQuery[T]]( Entity(Extractors.typeName[T], Nil, QuatMaking.inferQuatType[T].probit), @@ -113,11 +119,10 @@ inline def dynamicQuery[T]: DynamicEntityQuery[T] = { Nil ) ) -} inline def dynamicQuerySchema[T]( - entity: String, - columns: DynamicAlias[T]* + entity: String, + columns: DynamicAlias[T]* ): DynamicEntityQuery[T] = { val aliasesAndProperties = columns.map { alias => @@ -132,8 +137,8 @@ inline def dynamicQuerySchema[T]( val v = alias.property(Quoted[T](Ident("v", QuatMaking.inferQuatType[T].probit), Nil, Nil)) (v, PropertyAlias(path(v.ast), alias.name)) } - val aliases = aliasesAndProperties.map(_._2) - val lifts = aliasesAndProperties.map(_._1.lifts).flatten.toList + val aliases = aliasesAndProperties.map(_._2) + val lifts = aliasesAndProperties.map(_._1.lifts).flatten.toList val runtimeQuotes = aliasesAndProperties.map(_._1.runtimeQuotes).flatten.toList DynamicEntityQuery( Quoted[EntityQuery[T]]( diff --git a/quill-sql/src/main/scala/io/getquill/DynamicDslModel.scala b/quill-sql/src/main/scala/io/getquill/DynamicDslModel.scala index c470bed2c..dff1068cc 100644 --- a/quill-sql/src/main/scala/io/getquill/DynamicDslModel.scala +++ b/quill-sql/src/main/scala/io/getquill/DynamicDslModel.scala @@ -48,8 +48,8 @@ case class DynamicAlias[T](property: Quoted[T] => Quoted[Any], name: String) sealed trait DynamicSet[T, U] case class DynamicSetValue[T, U]( - property: Quoted[T] => Quoted[U], - value: Quoted[U] + property: Quoted[T] => Quoted[U], + value: Quoted[U] ) extends DynamicSet[T, U] case class DynamicSetEmpty[T, U]() extends DynamicSet[T, U] @@ -64,9 +64,9 @@ sealed class DynamicQuery[+T](val q: Quoted[Query[T]]) { Quoted[R](ast, otherLifts ++ q.lifts, otherRuntimeQuotes ++ q.runtimeQuotes) protected[this] def transform[U, V, R]( - f: Quoted[U] => Quoted[V], - t: (Ast, Ident, Ast) => Ast, - r: (Ast, List[Planter[_, _, _]], List[QuotationVase]) => R = dyn _ + f: Quoted[U] => Quoted[V], + t: (Ast, Ident, Ast) => Ast, + r: (Ast, List[Planter[_, _, _]], List[QuotationVase]) => R = dyn _ ) = withFreshIdent { v => val sp = splice(v, Nil, Nil) @@ -75,10 +75,10 @@ sealed class DynamicQuery[+T](val q: Quoted[Query[T]]) { }(Quat.Generic) protected[this] def transformOpt[O, R, TT >: T, D <: DynamicQuery[TT]]( - opt: Option[O], - f: (Quoted[TT], Quoted[O]) => Quoted[R], - t: (Quoted[TT] => Quoted[R]) => D, - thiz: D + opt: Option[O], + f: (Quoted[TT], Quoted[O]) => Quoted[R], + t: (Quoted[TT] => Quoted[R]) => D, + thiz: D )(implicit enc: GenericEncoder[O, _, _]) = opt match { case Some(o) => @@ -101,23 +101,23 @@ sealed class DynamicQuery[+T](val q: Quoted[Query[T]]) { filter(f) def filterOpt[O](opt: Option[O])( - f: (Quoted[T], Quoted[O]) => Quoted[Boolean] + f: (Quoted[T], Quoted[O]) => Quoted[Boolean] )(implicit enc: GenericEncoder[O, _, _]): DynamicQuery[T] = 
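    // If `opt` is defined, delegates to the two-argument `filter` via `transformOpt` (defined above); otherwise returns the query unchanged.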
transformOpt(opt, f, filter, this) def filterIf( - cond: Boolean + cond: Boolean )(f: Quoted[T] => Quoted[Boolean]): DynamicQuery[T] = if (cond) filter(f) else this def concatMap[R, U]( - f: Quoted[T] => Quoted[U] + f: Quoted[T] => Quoted[U] )(implicit ev: U => Iterable[R]): DynamicQuery[R] = transform(f, ConcatMap(_, _, _)) def sortBy[R]( - f: Quoted[T] => Quoted[R] + f: Quoted[T] => Quoted[R] )(implicit ord: Ord[R]): DynamicQuery[T] = transform(f, SortBy(_, _, _, ord.ord)) @@ -189,23 +189,23 @@ sealed class DynamicQuery[+T](val q: Quoted[Query[T]]) { DynamicJoinQuery(InnerJoin, q, q2) def leftJoin[A >: T, B]( - q2: Quoted[Query[B]] + q2: Quoted[Query[B]] ): DynamicJoinQuery[A, B, (A, Option[B])] = DynamicJoinQuery(LeftJoin, q, q2) def rightJoin[A >: T, B]( - q2: Quoted[Query[B]] + q2: Quoted[Query[B]] ): DynamicJoinQuery[A, B, (Option[A], B)] = DynamicJoinQuery(RightJoin, q, q2) def fullJoin[A >: T, B]( - q2: Quoted[Query[B]] + q2: Quoted[Query[B]] ): DynamicJoinQuery[A, B, (Option[A], Option[B])] = DynamicJoinQuery(FullJoin, q, q2) private[this] def flatJoin[R]( - tpe: JoinType, - on: Quoted[T] => Quoted[Boolean] + tpe: JoinType, + on: Quoted[T] => Quoted[Boolean] ): DynamicQuery[R] = withFreshIdent { v => val q2 = on(splice(v, Nil, Nil)) @@ -216,12 +216,12 @@ sealed class DynamicQuery[+T](val q: Quoted[Query[T]]) { flatJoin(InnerJoin, on) def leftJoin[A >: T]( - on: Quoted[A] => Quoted[Boolean] + on: Quoted[A] => Quoted[Boolean] ): DynamicQuery[Option[A]] = flatJoin(LeftJoin, on) def rightJoin[A >: T]( - on: Quoted[A] => Quoted[Boolean] + on: Quoted[A] => Quoted[Boolean] ): DynamicQuery[Option[A]] = flatJoin(RightJoin, on) @@ -250,21 +250,24 @@ sealed class DynamicQuery[+T](val q: Quoted[Query[T]]) { } case class DynamicJoinQuery[A, B, R]( - tpe: JoinType, - q1: Quoted[Query[A]], - q2: Quoted[Query[B]] + tpe: JoinType, + q1: Quoted[Query[A]], + q2: Quoted[Query[B]] ) { private def splice[R](ast: Ast) = Quoted[R](ast, q1.lifts ++ q2.lifts, q1.runtimeQuotes ++ q2.runtimeQuotes) - def on(f: (Quoted[A], Quoted[B]) => Quoted[Boolean]): DynamicQuery[R] = { + def on(f: (Quoted[A], Quoted[B]) => Quoted[Boolean]): DynamicQuery[R] = withFreshIdent { iA => withFreshIdent { iB => val q3 = f(splice(iA), splice(iB)) - dyn(Join(tpe, q1.ast, q2.ast, iA, iB, q3.ast), q1.lifts ++ q2.lifts ++ q3.lifts, q1.runtimeQuotes ++ q2.runtimeQuotes ++ q3.runtimeQuotes) + dyn( + Join(tpe, q1.ast, q2.ast, iA, iB, q3.ast), + q1.lifts ++ q2.lifts ++ q3.lifts, + q1.runtimeQuotes ++ q2.runtimeQuotes ++ q3.runtimeQuotes + ) }(q2.ast.quat) // TODO Verify Quat Later - }(q1.ast.quat) // TODO Verify Quat Later - } + }(q1.ast.quat) // TODO Verify Quat Later } case class DynamicEntityQuery[T](override val q: Quoted[EntityQuery[T]]) extends DynamicQuery[T](q) { @@ -273,17 +276,17 @@ case class DynamicEntityQuery[T](override val q: Quoted[EntityQuery[T]]) extends DynamicEntityQuery(Quoted[EntityQuery[R]](ast, q.lifts ++ lifts, q.runtimeQuotes ++ runtimeQuotes)) override def filter( - f: Quoted[T] => Quoted[Boolean] + f: Quoted[T] => Quoted[Boolean] ): DynamicEntityQuery[T] = transform(f, Filter(_, _, _), dyn) override def withFilter( - f: Quoted[T] => Quoted[Boolean] + f: Quoted[T] => Quoted[Boolean] ): DynamicEntityQuery[T] = filter(f) override def filterOpt[O](opt: Option[O])( - f: (Quoted[T], Quoted[O]) => Quoted[Boolean] + f: (Quoted[T], Quoted[O]) => Quoted[Boolean] )(implicit enc: GenericEncoder[O, _, _]): DynamicEntityQuery[T] = transformOpt(opt, f, filter, this) @@ -299,31 +302,34 @@ case class 
DynamicEntityQuery[T](override val q: Quoted[EntityQuery[T]]) extends type DynamicAssignment[U] = ((Quoted[T] => Quoted[U]), U) private[this] def propsValuesAndQuotes[S]( - l: List[DynamicSet[S, _]] + l: List[DynamicSet[S, _]] ) = - l.collect { - case s: DynamicSetValue[_, _] => - val v = Ident("v", Quat.Generic) - val spliceQuote = Quoted(v, q.lifts, q.runtimeQuotes) - val setPropertyQuote = s.property(spliceQuote) - val setValueQuote = s.value - (setPropertyQuote, setValueQuote, Assignment(v, setPropertyQuote.ast, setValueQuote.ast)) + l.collect { case s: DynamicSetValue[_, _] => + val v = Ident("v", Quat.Generic) + val spliceQuote = Quoted(v, q.lifts, q.runtimeQuotes) + val setPropertyQuote = s.property(spliceQuote) + val setValueQuote = s.value + (setPropertyQuote, setValueQuote, Assignment(v, setPropertyQuote.ast, setValueQuote.ast)) } def insert(l: DynamicSet[T, _]*): DynamicInsert[T] = { - val outputs = propsValuesAndQuotes(l.toList) - val assignemnts = outputs.map(_._3) - val lifts = (outputs.map(_._1.lifts).flatten ++ outputs.map(_._2.lifts).flatten).distinct + val outputs = propsValuesAndQuotes(l.toList) + val assignemnts = outputs.map(_._3) + val lifts = (outputs.map(_._1.lifts).flatten ++ outputs.map(_._2.lifts).flatten).distinct val runtimeQuotes = (outputs.map(_._1.runtimeQuotes).flatten ++ outputs.map(_._2.runtimeQuotes).flatten).distinct DynamicInsert( - Quoted[Insert[T]](io.getquill.ast.Insert(DynamicEntityQuery.this.q.ast, assignemnts), q.lifts ++ lifts, q.runtimeQuotes ++ runtimeQuotes) + Quoted[Insert[T]]( + io.getquill.ast.Insert(DynamicEntityQuery.this.q.ast, assignemnts), + q.lifts ++ lifts, + q.runtimeQuotes ++ runtimeQuotes + ) ) } def update(sets: DynamicSet[T, _]*): DynamicUpdate[T] = { - val outputs = propsValuesAndQuotes(sets.toList) - val assignemnts = outputs.map(_._3) - val lifts = (outputs.map(_._1.lifts).flatten ++ outputs.map(_._2.lifts).flatten).distinct + val outputs = propsValuesAndQuotes(sets.toList) + val assignemnts = outputs.map(_._3) + val lifts = (outputs.map(_._1.lifts).flatten ++ outputs.map(_._2.lifts).flatten).distinct val runtimeQuotes = (outputs.map(_._1.runtimeQuotes).flatten ++ outputs.map(_._2.runtimeQuotes).flatten).distinct DynamicUpdate( Quoted[Update[T]]( @@ -372,10 +378,10 @@ trait DynamicInsert[E] extends DynamicAction[Insert[E]] { q.runtimeQuotes ++ returnQuote.runtimeQuotes ) ) - } { Quat.Generic } + }(Quat.Generic) def returningGenerated[R]( - f: Quoted[E] => Quoted[R] + f: Quoted[E] => Quoted[R] ): DynamicActionReturning[E, R] = withFreshIdent { v => val returnQuote = f(Quoted(v, q.lifts, q.runtimeQuotes)) @@ -386,7 +392,7 @@ trait DynamicInsert[E] extends DynamicAction[Insert[E]] { q.runtimeQuotes ++ returnQuote.runtimeQuotes ) ) - } { Quat.Generic } + }(Quat.Generic) def onConflictIgnore: DynamicInsert[E] = DynamicInsert[E]( @@ -402,7 +408,7 @@ trait DynamicInsert[E] extends DynamicAction[Insert[E]] { ) def onConflictIgnore( - targets: (Quoted[E] => Quoted[Any])* + targets: (Quoted[E] => Quoted[Any])* ): DynamicInsert[E] = { val v = Quoted[E](Ident("v", Quat.Generic), q.lifts, q.runtimeQuotes) val quotesAndProperties = @@ -414,8 +420,8 @@ trait DynamicInsert[E] extends DynamicAction[Insert[E]] { fail(s"Invalid ignore column: $p") } } - val newProperties = quotesAndProperties.map(_._2) - val newLifts = quotesAndProperties.map(_._1.lifts).flatten.distinct + val newProperties = quotesAndProperties.map(_._2) + val newLifts = quotesAndProperties.map(_._1.lifts).flatten.distinct val newRuntimeQuotes = 
quotesAndProperties.map(_._1.runtimeQuotes).flatten.distinct DynamicInsert[E]( Quoted[Insert[E]]( @@ -432,9 +438,7 @@ trait DynamicInsert[E] extends DynamicAction[Insert[E]] { } case class DynamicActionReturning[E, Output]( - q: Quoted[ActionReturning[E, Output]] + q: Quoted[ActionReturning[E, Output]] ) extends DynamicAction[ActionReturning[E, Output]] -case class DynamicUpdate[E](q: Quoted[Update[E]]) - extends DynamicAction[Update[E]] -case class DynamicDelete[E](q: Quoted[Delete[E]]) - extends DynamicAction[Delete[E]] +case class DynamicUpdate[E](q: Quoted[Update[E]]) extends DynamicAction[Update[E]] +case class DynamicDelete[E](q: Quoted[Delete[E]]) extends DynamicAction[Delete[E]] diff --git a/quill-sql/src/main/scala/io/getquill/MirrorContext.scala b/quill-sql/src/main/scala/io/getquill/MirrorContext.scala index 4a166d197..1e73ff630 100644 --- a/quill-sql/src/main/scala/io/getquill/MirrorContext.scala +++ b/quill-sql/src/main/scala/io/getquill/MirrorContext.scala @@ -7,8 +7,12 @@ import io.getquill.idiom.Idiom import io.getquill.NamingStrategy import scala.annotation.targetName -class MirrorContext[+Dialect <: Idiom, +Naming <: NamingStrategy](val idiom: Dialect, val naming: Naming, val session: MirrorSession = MirrorSession("DefaultMirrorContextSession")) - extends MirrorContextBase[Dialect, Naming] with AstSplicing +class MirrorContext[+Dialect <: Idiom, +Naming <: NamingStrategy]( + val idiom: Dialect, + val naming: Naming, + val session: MirrorSession = MirrorSession("DefaultMirrorContextSession") +) extends MirrorContextBase[Dialect, Naming] + with AstSplicing trait MirrorContextBase[+Dialect <: Idiom, +Naming <: NamingStrategy] extends Context[Dialect, Naming] @@ -16,21 +20,21 @@ trait MirrorContextBase[+Dialect <: Idiom, +Naming <: NamingStrategy] with ContextVerbTranslate[Dialect, Naming] with MirrorDecoders with MirrorEncoders { self => - override type Result[T] = T - override type RunQueryResult[T] = QueryMirror[T] - override type RunQuerySingleResult[T] = QueryMirror[T] - override type PrepareRow = Row - override type ResultRow = Row - override type RunActionResult = ActionMirror - override type RunActionReturningResult[T] = ActionReturningMirror[_, T] + override type Result[T] = T + override type RunQueryResult[T] = QueryMirror[T] + override type RunQuerySingleResult[T] = QueryMirror[T] + override type PrepareRow = Row + override type ResultRow = Row + override type RunActionResult = ActionMirror + override type RunActionReturningResult[T] = ActionReturningMirror[_, T] override type RunBatchActionReturningResult[T] = BatchActionReturningMirror[T] - override type RunBatchActionResult = BatchActionMirror - override type Session = MirrorSession - override type Extractor[T] = (ResultRow, MirrorSession) => T + override type RunBatchActionResult = BatchActionMirror + override type Session = MirrorSession + override type Extractor[T] = (ResultRow, MirrorSession) => T - override type Runner = Unit + override type Runner = Unit override type TranslateRunner = Unit - override def context: Runner = () + override def context: Runner = () override def translateContext: Runner = () def session: MirrorSession @@ -49,60 +53,106 @@ trait MirrorContextBase[+Dialect <: Idiom, +Naming <: NamingStrategy] } case class ActionMirror(string: String, prepareRow: PrepareRow, info: ExecutionInfo) - case class ActionReturningMirror[T, R](string: String, prepareRow: PrepareRow, extractor: Extractor[T], returningBehavior: ReturnAction, info: ExecutionInfo) + case class ActionReturningMirror[T, R]( + 
string: String, + prepareRow: PrepareRow, + extractor: Extractor[T], + returningBehavior: ReturnAction, + info: ExecutionInfo + ) case class BatchActionMirror(groups: List[(String, List[Row])], info: ExecutionInfo) - case class BatchActionReturningMirror[T](groups: List[(String, ReturnAction, List[PrepareRow])], extractor: Extractor[T], info: ExecutionInfo) + case class BatchActionReturningMirror[T]( + groups: List[(String, ReturnAction, List[PrepareRow])], + extractor: Extractor[T], + info: ExecutionInfo + ) @targetName("runQueryDefault") inline def run[T](inline quoted: Quoted[Query[T]]): QueryMirror[T] = InternalApi.runQueryDefault(quoted) @targetName("runQuery") - inline def run[T](inline quoted: Quoted[Query[T]], inline wrap: OuterSelectWrap): QueryMirror[T] = InternalApi.runQuery(quoted, wrap) + inline def run[T](inline quoted: Quoted[Query[T]], inline wrap: OuterSelectWrap): QueryMirror[T] = + InternalApi.runQuery(quoted, wrap) @targetName("runQuerySingle") inline def run[T](inline quoted: Quoted[T]): QueryMirror[T] = InternalApi.runQuerySingle(quoted) @targetName("runAction") inline def run[E](inline quoted: Quoted[Action[E]]): ActionMirror = InternalApi.runAction(quoted) @targetName("runActionReturning") - inline def run[E, T](inline quoted: Quoted[ActionReturning[E, T]]): ActionReturningMirror[T, T] = InternalApi.runActionReturning(quoted).asInstanceOf[ActionReturningMirror[T, T]] + inline def run[E, T](inline quoted: Quoted[ActionReturning[E, T]]): ActionReturningMirror[T, T] = + InternalApi.runActionReturning(quoted).asInstanceOf[ActionReturningMirror[T, T]] @targetName("runActionReturningMany") - inline def run[E, T](inline quoted: Quoted[ActionReturning[E, List[T]]]): ActionReturningMirror[T, List[T]] = InternalApi.runActionReturningMany(quoted).asInstanceOf[ActionReturningMirror[T, List[T]]] + inline def run[E, T](inline quoted: Quoted[ActionReturning[E, List[T]]]): ActionReturningMirror[T, List[T]] = + InternalApi.runActionReturningMany(quoted).asInstanceOf[ActionReturningMirror[T, List[T]]] @targetName("runBatchAction") - inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]], rowsPerBatch: Int): BatchActionMirror = InternalApi.runBatchAction(quoted, rowsPerBatch) + inline def run[I, A <: Action[I] & QAC[I, Nothing]]( + inline quoted: Quoted[BatchAction[A]], + rowsPerBatch: Int + ): BatchActionMirror = InternalApi.runBatchAction(quoted, rowsPerBatch) @targetName("runBatchActionDefault") - inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): BatchActionMirror = InternalApi.runBatchAction(quoted, 1) + inline def run[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): BatchActionMirror = + InternalApi.runBatchAction(quoted, 1) @targetName("runBatchActionReturning") - inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]], rowsPerBatch: Int): BatchActionReturningMirror[T] = InternalApi.runBatchActionReturning(quoted, rowsPerBatch) + inline def run[I, T, A <: Action[I] & QAC[I, T]]( + inline quoted: Quoted[BatchAction[A]], + rowsPerBatch: Int + ): BatchActionReturningMirror[T] = InternalApi.runBatchActionReturning(quoted, rowsPerBatch) @targetName("runBatchActionReturningDefault") - inline def run[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]]): BatchActionReturningMirror[T] = InternalApi.runBatchActionReturning(quoted, 1) - - override def executeQuery[T](string: String, prepare: Prepare = identityPrepare, extractor: 
Extractor[T] = identityExtractor)(info: ExecutionInfo, dc: Runner) = + inline def run[I, T, A <: Action[I] & QAC[I, T]]( + inline quoted: Quoted[BatchAction[A]] + ): BatchActionReturningMirror[T] = InternalApi.runBatchActionReturning(quoted, 1) + + override def executeQuery[T]( + string: String, + prepare: Prepare = identityPrepare, + extractor: Extractor[T] = identityExtractor + )(info: ExecutionInfo, dc: Runner) = QueryMirror(string, prepare(Row(), session)._2, extractor, info) - override def executeQuerySingle[T](string: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)(info: ExecutionInfo, dc: Runner) = + override def executeQuerySingle[T]( + string: String, + prepare: Prepare = identityPrepare, + extractor: Extractor[T] = identityExtractor + )(info: ExecutionInfo, dc: Runner) = QueryMirror(string, prepare(Row(), session)._2, extractor, info) - override def executeAction(string: String, prepare: Prepare = identityPrepare)(info: ExecutionInfo, dc: Runner): Result[RunActionResult] = + override def executeAction(string: String, prepare: Prepare = identityPrepare)( + info: ExecutionInfo, + dc: Runner + ): Result[RunActionResult] = ActionMirror(string, prepare(Row(), session)._2, info) - def executeActionReturning[T](sql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T], returningBehavior: ReturnAction)(info: ExecutionInfo, dc: Runner): Result[RunActionReturningResult[T]] = + def executeActionReturning[T]( + sql: String, + prepare: Prepare = identityPrepare, + extractor: Extractor[T], + returningBehavior: ReturnAction + )(info: ExecutionInfo, dc: Runner): Result[RunActionReturningResult[T]] = ActionReturningMirror[T, T](sql, prepare(Row(), session)._2, extractor, returningBehavior, info) - def executeActionReturningMany[T](sql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T], returningBehavior: ReturnAction)(info: ExecutionInfo, dc: Runner): Result[RunActionReturningResult[List[T]]] = + def executeActionReturningMany[T]( + sql: String, + prepare: Prepare = identityPrepare, + extractor: Extractor[T], + returningBehavior: ReturnAction + )(info: ExecutionInfo, dc: Runner): Result[RunActionReturningResult[List[T]]] = ActionReturningMirror[T, List[T]](sql, prepare(Row(), session)._2, extractor, returningBehavior, info) - override def executeBatchAction(groups: List[BatchGroup])(info: ExecutionInfo, dc: Runner): Result[RunBatchActionResult] = + override def executeBatchAction( + groups: List[BatchGroup] + )(info: ExecutionInfo, dc: Runner): Result[RunBatchActionResult] = BatchActionMirror( - groups.map { - case BatchGroup(string, prepare) => - (string, prepare.map(_(Row(), session)._2)) + groups.map { case BatchGroup(string, prepare) => + (string, prepare.map(_(Row(), session)._2)) }, info ) - override def executeBatchActionReturning[T](groups: List[BatchGroupReturning], extractor: Extractor[T])(info: ExecutionInfo, dc: Runner): Result[RunBatchActionReturningResult[T]] = + override def executeBatchActionReturning[T]( + groups: List[BatchGroupReturning], + extractor: Extractor[T] + )(info: ExecutionInfo, dc: Runner): Result[RunBatchActionReturningResult[T]] = BatchActionReturningMirror[T]( - groups.map { - case BatchGroupReturning(string, returningBehavior, prepare) => - (string, returningBehavior, prepare.map(_(Row(), session)._2)) + groups.map { case BatchGroupReturning(string, returningBehavior, prepare) => + (string, returningBehavior, prepare.map(_(Row(), session)._2)) }, extractor, info @@ -111,8 +161,8 @@ trait 
MirrorContextBase[+Dialect <: Idiom, +Naming <: NamingStrategy] case class PrepareQueryMirror(sql: String, prepare: Prepare, info: ExecutionInfo) case class PrepareBatchMirror(groups: List[(String, List[PrepareRow])], info: ExecutionInfo) - type PrepareQueryResult = PrepareQueryMirror - type PrepareActionResult = PrepareQueryMirror + type PrepareQueryResult = PrepareQueryMirror + type PrepareActionResult = PrepareQueryMirror type PrepareBatchActionResult = PrepareBatchMirror def prepareSingle(string: String, prepare: Prepare = identityPrepare)(info: ExecutionInfo, dc: Runner) = @@ -126,9 +176,8 @@ trait MirrorContextBase[+Dialect <: Idiom, +Naming <: NamingStrategy] def prepareBatchAction(groups: List[BatchGroup])(info: ExecutionInfo, dc: Runner) = PrepareBatchMirror( - groups.map { - case BatchGroup(string, prepare) => - (string, prepare.map(_(Row(), session)._2)) + groups.map { case BatchGroup(string, prepare) => + (string, prepare.map(_(Row(), session)._2)) }, info ) diff --git a/quill-sql/src/main/scala/io/getquill/OuterSelect.scala b/quill-sql/src/main/scala/io/getquill/OuterSelect.scala index 7983a6877..7010eaf09 100644 --- a/quill-sql/src/main/scala/io/getquill/OuterSelect.scala +++ b/quill-sql/src/main/scala/io/getquill/OuterSelect.scala @@ -38,11 +38,12 @@ object OuterSelectWrap { import quotes.reflect._ e match { case Expr(expr) => expr - case _ => report.throwError( + case _ => + report.throwError( s""" - |Cannot unlift OuterSelectWrap from the value: ${Format.Expr(e)}. - |The OuterSelectWrap parameter needs to be used as a constant for example: - |run(query[Person].map(p => p.name), OuterSelectWrap.Never) + |Cannot unlift OuterSelectWrap from the value: ${Format.Expr(e)}. + |The OuterSelectWrap parameter needs to be used as a constant, for example: + |run(query[Person].map(p => p.name), OuterSelectWrap.Never) """.stripMargin ) } diff --git a/quill-sql/src/main/scala/io/getquill/SqlMirrorContext.scala b/quill-sql/src/main/scala/io/getquill/SqlMirrorContext.scala index 7f839f497..76e53c0b0 100644 --- a/quill-sql/src/main/scala/io/getquill/SqlMirrorContext.scala +++ b/quill-sql/src/main/scala/io/getquill/SqlMirrorContext.scala @@ -6,7 +6,10 @@ import io.getquill.context.mirror.ArrayMirrorEncoding import io.getquill.context.AstSplicing import io.getquill.context.mirror.MirrorSession -/** Workaround for IntelliJ SCL-20185. Inheriting MirrorContextBase directly so that `run` methods have autocomplete. */ +/** + * Workaround for IntelliJ SCL-20185. Inheriting MirrorContextBase directly so + * that `run` methods have autocomplete. + */ class SqlMirrorContext[+Idiom <: BaseIdiom, +Naming <: NamingStrategy](val idiom: Idiom, val naming: Naming) extends MirrorContextBase[Idiom, Naming] with AstSplicing diff --git a/quill-sql/src/main/scala/io/getquill/StaticSplice.scala b/quill-sql/src/main/scala/io/getquill/StaticSplice.scala index 4820d154a..e2fee7042 100644 --- a/quill-sql/src/main/scala/io/getquill/StaticSplice.scala +++ b/quill-sql/src/main/scala/io/getquill/StaticSplice.scala @@ -12,9 +12,9 @@ import java.time.LocalDate import java.time.LocalDateTime /** - * Trait that allows usage of 'static' block. Can declared one of these and use similar to encoders - * but it needs to be compiled in a previous compilation unit and a global static. - * TODO More explanation + * Trait that allows usage of a 'static' block. One can declare one of these and + * use it similarly to encoders, but it needs to be compiled in a previous + * compilation unit and be a global static. 
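+ * A hypothetical instance might look like this (sketch only; `Person` and its SQL rendering are assumed, not part of this patch): {code} case class Person(name: String); object PersonToSql extends ToSql[Person] { def toSql(v: Person): String = s"'${v.name}'" } {code}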
TODO More explanation */ trait ToSql[T] { def toSql(value: T): String } @@ -42,13 +42,14 @@ object StringCodec { def summon[T: Type](using Quotes): Either[String, ToSql[T]] = { import quotes.reflect.{Try => TTry, _} for { - summonValue <- Expr.summon[io.getquill.ToSql[T]].toEitherOr(s"a ToString[${Format.TypeOf[T]}] cannot be summoned") + summonValue <- + Expr.summon[io.getquill.ToSql[T]].toEitherOr(s"a ToSql[${Format.TypeOf[T]}] cannot be summoned") // Summoning ToSql[T] will give (SpliceString: ToSql[String]) // (a.k.a. Typed(Ident(SpliceString), TypeTree(ToSql[String])) usually with an outer inline surrounding it all) // so then we need to use Untype to just get SpliceString which is a module that we can load staticSpliceType = Untype(summonValue.asTerm.underlyingArgument).tpe.widen - untypedModule <- Load.Module.fromTypeRepr(staticSpliceType).toEither.mapLeft(_.getMessage) - module <- Try(untypedModule.asInstanceOf[io.getquill.ToSql[T]]).toEither.mapLeft(_.getMessage) + untypedModule <- Load.Module.fromTypeRepr(staticSpliceType).toEither.mapLeft(_.getMessage) + module <- Try(untypedModule.asInstanceOf[io.getquill.ToSql[T]]).toEither.mapLeft(_.getMessage) } yield (module) } } @@ -61,37 +62,37 @@ object StringCodec { // Special case for splicing a string directly i.e. need to add 'single-quotes' object SpliceString extends StringCodec[String] { override def toSql(value: String): String = s"'${value}'" - def toString(value: String) = value - def fromString(value: String) = value + def toString(value: String) = value + def fromString(value: String) = value } implicit inline def stringCodec: StringCodec[String] = SpliceString object SpliceInt extends StringCodec[Int] { - def toString(value: Int) = s"${value}" + def toString(value: Int) = s"${value}" def fromString(value: String) = value.toInt } implicit inline def intCodec: StringCodec[Int] = SpliceInt object SpliceShort extends StringCodec[Short] { - def toString(value: Short) = s"${value}" + def toString(value: Short) = s"${value}" def fromString(value: String) = value.toShort } implicit inline def shortCodec: StringCodec[Short] = SpliceShort object SpliceLong extends StringCodec[Long] { - def toString(value: Long) = s"${value}" + def toString(value: Long) = s"${value}" def fromString(value: String) = value.toLong } implicit inline def longCodec: ToString[Long] = SpliceLong object SpliceFloat extends StringCodec[Float] { - def toString(value: Float) = s"${value}" + def toString(value: Float) = s"${value}" def fromString(value: String) = value.toFloat } implicit inline def floatCodec: StringCodec[Float] = SpliceFloat object SpliceDouble extends StringCodec[Double] { - def toString(value: Double) = s"${value}" + def toString(value: Double) = s"${value}" def fromString(value: String) = value.toDouble } implicit inline def doubleCodec: StringCodec[Double] = SpliceDouble @@ -103,7 +104,7 @@ private[getquill] object DateFormats { object SpliceDate extends StringCodec[java.sql.Date] { override def toSql(value: java.sql.Date): String = s"'${toString(value)}'" - def toString(value: java.sql.Date) = value.toLocalDate.format(DateFormats.printFormat) + def toString(value: java.sql.Date) = value.toLocalDate.format(DateFormats.printFormat) def fromString(value: String) = { val local = LocalDate.parse(value, DateFormats.parseFormat) java.sql.Date.valueOf(local) @@ -113,14 +114,14 @@ implicit inline def dateCodec: StringCodec[java.sql.Date] = SpliceDate object SpliceLocalDate extends StringCodec[java.time.LocalDate] { override def toSql(value: 
java.time.LocalDate): String = s"'${toString(value)}'" - def toString(value: java.time.LocalDate) = value.format(DateFormats.printFormat) - def fromString(value: String) = LocalDate.parse(value, DateFormats.parseFormat) + def toString(value: java.time.LocalDate) = value.format(DateFormats.printFormat) + def fromString(value: String) = LocalDate.parse(value, DateFormats.parseFormat) } implicit inline def localDateCodec: StringCodec[java.time.LocalDate] = SpliceLocalDate object SpliceLocalDateTime extends StringCodec[java.time.LocalDateTime] { override def toSql(value: java.time.LocalDateTime): String = s"'${toString(value)}'" - def toString(value: java.time.LocalDateTime) = value.format(DateTimeFormatter.ISO_LOCAL_DATE_TIME) - def fromString(value: String) = LocalDateTime.parse(value, DateTimeFormatter.ISO_LOCAL_DATE_TIME) + def toString(value: java.time.LocalDateTime) = value.format(DateTimeFormatter.ISO_LOCAL_DATE_TIME) + def fromString(value: String) = LocalDateTime.parse(value, DateTimeFormatter.ISO_LOCAL_DATE_TIME) } implicit inline def localDateTimeCodec: StringCodec[java.time.LocalDateTime] = SpliceLocalDateTime diff --git a/quill-sql/src/main/scala/io/getquill/UpperCaseNonDefault.scala b/quill-sql/src/main/scala/io/getquill/UpperCaseNonDefault.scala index 82f795ee2..b56514239 100644 --- a/quill-sql/src/main/scala/io/getquill/UpperCaseNonDefault.scala +++ b/quill-sql/src/main/scala/io/getquill/UpperCaseNonDefault.scala @@ -2,14 +2,14 @@ package io.getquill trait UpperCaseNonDefault extends NamingStrategy { override def column(s: String): String = s.toUpperCase - override def table(s: String): String = s.toUpperCase - override def default(s: String) = s + override def table(s: String): String = s.toUpperCase + override def default(s: String) = s } object UpperCaseNonDefault extends UpperCaseNonDefault trait UpperCaseEscapeColumn extends NamingStrategy { override def column(s: String): String = s""""${s.toUpperCase}"""" - override def table(s: String): String = s - override def default(s: String) = s + override def table(s: String): String = s + override def default(s: String) = s } object UpperCaseEscapeColumn extends UpperCaseEscapeColumn diff --git a/quill-sql/src/main/scala/io/getquill/context/Context.scala b/quill-sql/src/main/scala/io/getquill/context/Context.scala index 2422b78e0..cd9fcad3c 100644 --- a/quill-sql/src/main/scala/io/getquill/context/Context.scala +++ b/quill-sql/src/main/scala/io/getquill/context/Context.scala @@ -57,17 +57,23 @@ trait ContextStandard[+Idiom <: io.getquill.idiom.Idiom, +Naming <: NamingStrate with ContextVerbPrepareLambda[Idiom, Naming] trait Context[+Dialect <: Idiom, +Naming <: NamingStrategy] - extends ProtoContextSecundus[Dialect, Naming] with EncodingDsl with Closeable { + extends ProtoContextSecundus[Dialect, Naming] + with EncodingDsl + with Closeable { self => /** - * Base type used to determine whether there is an execution context that needs to be summoned to perform - * execution methods e.g. in the PostgresJasync contexts that use Scala Futures that need an ExecutionContext. + * Base type used to determine whether there is an execution context that + * needs to be summoned to perform execution methods e.g. in the + * PostgresJasync contexts that use Scala Futures that need an + * ExecutionContext. 
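+ * For instance, a context whose runner is a plain member might declare (illustrative sketch, not part of this patch): {code} override type RunnerBehavior = RunnerSummoningBehavior.Member {code}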
*/ type RunnerBehavior <: RunnerSummoningBehavior protected def context: Runner = fail(s"Runner method not implemented for '${this.getClass.getName}' Context") - implicit inline def dec[T]: GenericDecoder[ResultRow, Session, T, DecodingType.Generic] = ${ GenericDecoder.summon[T, ResultRow, Session] } + implicit inline def dec[T]: GenericDecoder[ResultRow, Session, T, DecodingType.Generic] = ${ + GenericDecoder.summon[T, ResultRow, Session] + } // def probe(statement: String): Try[_] // todo add 'prepare' i.e. encoders here @@ -94,22 +100,31 @@ trait Context[+Dialect <: Idiom, +Naming <: NamingStrategy] extension [T](inline entity: EntityQuery[T]) { inline def insertValue(inline value: T): Insert[T] = ${ InsertUpdateMacro.static[T, Insert]('entity, 'value) } inline def updateValue(inline value: T): Update[T] = ${ InsertUpdateMacro.static[T, Update]('entity, 'value) } - private[getquill] inline def insertValueDynamic(inline value: T): Insert[T] = ${ InsertUpdateMacro.dynamic[T, Insert]('entity, 'value) } - private[getquill] inline def updateValueDynamic(inline value: T): Update[T] = ${ InsertUpdateMacro.dynamic[T, Update]('entity, 'value) } + private[getquill] inline def insertValueDynamic(inline value: T): Insert[T] = ${ + InsertUpdateMacro.dynamic[T, Insert]('entity, 'value) + } + private[getquill] inline def updateValueDynamic(inline value: T): Update[T] = ${ + InsertUpdateMacro.dynamic[T, Update]('entity, 'value) + } } extension [T](inline quotedEntity: Quoted[EntityQuery[T]]) { - inline def insertValue(inline value: T): Insert[T] = io.getquill.unquote[EntityQuery[T]](quotedEntity).insertValue(value) - inline def updateValue(inline value: T): Update[T] = io.getquill.unquote[EntityQuery[T]](quotedEntity).updateValue(value) - private[getquill] inline def insertValueDynamic(inline value: T): Insert[T] = io.getquill.unquote[EntityQuery[T]](quotedEntity).insertValueDynamic(value) - private[getquill] inline def updateValueDynamic(inline value: T): Update[T] = io.getquill.unquote[EntityQuery[T]](quotedEntity).updateValueDynamic(value) + inline def insertValue(inline value: T): Insert[T] = + io.getquill.unquote[EntityQuery[T]](quotedEntity).insertValue(value) + inline def updateValue(inline value: T): Update[T] = + io.getquill.unquote[EntityQuery[T]](quotedEntity).updateValue(value) + private[getquill] inline def insertValueDynamic(inline value: T): Insert[T] = + io.getquill.unquote[EntityQuery[T]](quotedEntity).insertValueDynamic(value) + private[getquill] inline def updateValueDynamic(inline value: T): Update[T] = + io.getquill.unquote[EntityQuery[T]](quotedEntity).updateValueDynamic(value) } extension [T](inline q: Query[T]) { /** - * When using this with FilterColumns make sure it comes FIRST. Otherwise the columns are you filtering - * may have been nullified in the SQL before the filteration has actually happened. + * When using this with FilterColumns make sure it comes FIRST. Otherwise + * the columns you are filtering may have been nullified in the SQL before + * the filtering has actually happened. */ inline def filterByKeys(inline map: Map[String, Any]) = q.filter(p => MapFlicer[T, PrepareRow, Session](p, map)) @@ -135,6 +150,7 @@ trait Context[+Dialect <: Idiom, +Naming <: NamingStrategy] * delegate to these in the individual contexts. 
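 * For example, the MirrorContext shown earlier in this patch simply forwards its public `run` here: {code} inline def run[T](inline quoted: Quoted[Query[T]]): QueryMirror[T] = InternalApi.runQueryDefault(quoted) {code}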
*/ object InternalApi { + + /** Internal API that cannot be made private due to how inline functions work */ inline def _summonRunner() = DatasourceContextInjectionMacro[RunnerBehavior, Runner, self.type](context) @@ -142,7 +158,8 @@ trait Context[+Dialect <: Idiom, +Naming <: NamingStrategy] runQuery(quoted, OuterSelectWrap.Default) // Must be lazy since idiom/naming are null (in some contexts) initially due to initialization order - private lazy val make = ContextOperation.Factory[Dialect, Naming, PrepareRow, ResultRow, Session, self.type](self.idiom, self.naming) + private lazy val make = + ContextOperation.Factory[Dialect, Naming, PrepareRow, ResultRow, Session, self.type](self.idiom, self.naming) inline def runQuery[T](inline quoted: Quoted[Query[T]], inline wrap: OuterSelectWrap): Result[RunQueryResult[T]] = { val ca = make.op[Nothing, T, Result[RunQueryResult[T]]] { arg => @@ -167,25 +184,38 @@ trait Context[+Dialect <: Idiom, +Naming <: NamingStrategy] QueryExecution.apply(ca)(quoted, None) } - inline def runActionReturning[E, T](inline quoted: Quoted[ActionReturning[E, T]]): Result[RunActionReturningResult[T]] = { + inline def runActionReturning[E, T]( + inline quoted: Quoted[ActionReturning[E, T]] + ): Result[RunActionReturningResult[T]] = { val ca = make.op[E, T, Result[RunActionReturningResult[T]]] { arg => // Need an extractor with special information that helps with the SQL returning specifics val returningExt = arg.extractor.requireReturning() - self.executeActionReturning(arg.sql, arg.prepare, returningExt.extract, returningExt.returningBehavior)(arg.executionInfo, _summonRunner()) + self.executeActionReturning(arg.sql, arg.prepare, returningExt.extract, returningExt.returningBehavior)( + arg.executionInfo, + _summonRunner() + ) } QueryExecution.apply(ca)(quoted, None) } - inline def runActionReturningMany[E, T](inline quoted: Quoted[ActionReturning[E, List[T]]]): Result[RunActionReturningResult[List[T]]] = { + inline def runActionReturningMany[E, T]( + inline quoted: Quoted[ActionReturning[E, List[T]]] + ): Result[RunActionReturningResult[List[T]]] = { val ca = make.op[E, T, Result[RunActionReturningResult[List[T]]]] { arg => // Need an extractor with special information that helps with the SQL returning specifics val returningExt = arg.extractor.requireReturning() - self.executeActionReturningMany(arg.sql, arg.prepare, returningExt.extract, returningExt.returningBehavior)(arg.executionInfo, _summonRunner()) + self.executeActionReturningMany(arg.sql, arg.prepare, returningExt.extract, returningExt.returningBehavior)( + arg.executionInfo, + _summonRunner() + ) } QueryExecution.apply(ca)(quoted, None) } - inline def runBatchAction[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]], rowsPerBatch: Int): Result[RunBatchActionResult] = { + inline def runBatchAction[I, A <: Action[I] & QAC[I, Nothing]]( + inline quoted: Quoted[BatchAction[A]], + rowsPerBatch: Int + ): Result[RunBatchActionResult] = { val ca = make.batch[I, Nothing, A, Result[RunBatchActionResult]] { arg => // Supporting only one top-level query batch group. Don't know if there are use-cases for multiple queries. 
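      // Each (sql, prepare) pair produced by the batch expansion becomes one BatchGroup handed to executeBatchAction.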
val groups = arg.groups.map((sql, prepare) => BatchGroup(sql, prepare)) @@ -194,7 +224,10 @@ trait Context[+Dialect <: Idiom, +Naming <: NamingStrategy] QueryExecutionBatch.apply(ca, rowsPerBatch)(quoted) } - inline def runBatchActionReturning[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]], rowsPerBatch: Int): Result[RunBatchActionReturningResult[T]] = { + inline def runBatchActionReturning[I, T, A <: Action[I] & QAC[I, T]]( + inline quoted: Quoted[BatchAction[A]], + rowsPerBatch: Int + ): Result[RunBatchActionReturningResult[T]] = { val ca = make.batch[I, T, A, Result[RunBatchActionReturningResult[T]]] { arg => val returningExt = arg.extractor.requireReturning() // Supporting only one top-level query batch group. Don't know if there are use-cases for multiple queries. @@ -213,7 +246,9 @@ trait Context[+Dialect <: Idiom, +Naming <: NamingStrategy] case other => // Note. If we want to cross-compile to ScalaJS this will only work for the JVM variant. Have a look at ContextLog // in Scala2-Quill for an approach on how to do that. - Logger(s"Expected a single result from the query: `${sql}` but got: ${abbrevList(other)}. Only the 1st result will be returned!") + Logger( + s"Expected a single result from the query: `${sql}` but got: ${abbrevList(other)}. Only the 1st result will be returned!" + ) other.head } diff --git a/quill-sql/src/main/scala/io/getquill/context/ContextEffect.scala b/quill-sql/src/main/scala/io/getquill/context/ContextEffect.scala index 330a05bd4..bb911470f 100644 --- a/quill-sql/src/main/scala/io/getquill/context/ContextEffect.scala +++ b/quill-sql/src/main/scala/io/getquill/context/ContextEffect.scala @@ -4,9 +4,9 @@ import scala.language.higherKinds // TODO In the Scala 2 Quill, move this into the Portable module /** - * In order to be able to reuse methods in the Jdbc Context as well as others, there must be a way - * to encapsulate the effects of these contexts. This simple interface provides them in a fairly - * generic manner. + * In order to be able to reuse methods in the Jdbc Context as well as others, + * there must be a way to encapsulate the effects of these contexts. This simple + * interface provides them in a fairly generic manner. */ trait ContextEffect[F[_]] { @@ -22,7 +22,8 @@ trait ContextEffect[F[_]] { /** * Aggregate a list of effects into a single effect element. Most effect types - * used in Quill context easily support this kind of operation e.g. Futures, monix Tasks, Observables, etc... + * used in Quill context easily support this kind of operation e.g. Futures, + * monix Tasks, Observables, etc... */ def seq[A](f: List[F[A]]): F[List[A]] } diff --git a/quill-sql/src/main/scala/io/getquill/context/ContextHelp.scala b/quill-sql/src/main/scala/io/getquill/context/ContextHelp.scala index 801dd3455..9edf852bf 100644 --- a/quill-sql/src/main/scala/io/getquill/context/ContextHelp.scala +++ b/quill-sql/src/main/scala/io/getquill/context/ContextHelp.scala @@ -5,20 +5,27 @@ import io.getquill.ReturnAction sealed trait RunnerSummoningBehavior object RunnerSummoningBehavior { sealed trait Implicit extends RunnerSummoningBehavior - object Implicit extends Implicit - sealed trait Member extends RunnerSummoningBehavior - object Member extends Member + object Implicit extends Implicit + sealed trait Member extends RunnerSummoningBehavior + object Member extends Member } sealed trait Extraction[-ResultRow, -Session, +T] { - /** Require an effect to be be simple and retrieve it. 
Effectful at compile-time since it can fail compilation */ + + /** + * Require an effect to be simple and retrieve it. Effectful at + * compile-time since it can fail compilation + */ def requireSimple() = this match { case ext: Extraction.Simple[_, _, _] => ext case _ => throw new IllegalArgumentException("Extractor required") } - /** Require an effect to be be returning and retrieve it. Effectful at compile-time since it can fail compilation */ + /** + * Require an effect to be returning and retrieve it. Effectful at + * compile-time since it can fail compilation + */ def requireReturning() = this match { case ext: Extraction.Returning[_, _, _] => ext @@ -28,6 +35,7 @@ sealed trait Extraction[-ResultRow, -Session, +T] { object Extraction { case class Simple[ResultRow, Session, T](extract: (ResultRow, Session) => T) extends Extraction[ResultRow, Session, T] - case class Returning[ResultRow, Session, T](extract: (ResultRow, Session) => T, returningBehavior: ReturnAction) extends Extraction[ResultRow, Session, T] + case class Returning[ResultRow, Session, T](extract: (ResultRow, Session) => T, returningBehavior: ReturnAction) + extends Extraction[ResultRow, Session, T] case object None extends Extraction[Any, Any, Nothing] } diff --git a/quill-sql/src/main/scala/io/getquill/context/ContextVerbPrepare.scala b/quill-sql/src/main/scala/io/getquill/context/ContextVerbPrepare.scala index 64f072d61..908d61bc0 100644 --- a/quill-sql/src/main/scala/io/getquill/context/ContextVerbPrepare.scala +++ b/quill-sql/src/main/scala/io/getquill/context/ContextVerbPrepare.scala @@ -5,7 +5,7 @@ import scala.language.experimental.macros import java.io.Closeable import scala.compiletime.summonFrom import scala.util.Try -import io.getquill.{ReturnAction} +import io.getquill.ReturnAction import io.getquill.generic.EncodingDsl import io.getquill.Quoted import io.getquill.QueryMeta @@ -51,20 +51,30 @@ trait ContextVerbPrepare[+Dialect <: Idiom, +Naming <: NamingStrategy] { type Session type Runner - type PrepareQueryResult // Usually: Session => Result[PrepareRow] - type PrepareActionResult // Usually: Session => Result[PrepareRow] + type PrepareQueryResult // Usually: Session => Result[PrepareRow] + type PrepareActionResult // Usually: Session => Result[PrepareRow] type PrepareBatchActionResult // Usually: Session => Result[List[PrepareRow]] - def prepareQuery(sql: String, prepare: Prepare = identityPrepare)(executionInfo: ExecutionInfo, dc: Runner): PrepareQueryResult - def prepareSingle(sql: String, prepare: Prepare = identityPrepare)(executionInfo: ExecutionInfo, dc: Runner): PrepareQueryResult - def prepareAction(sql: String, prepare: Prepare = identityPrepare)(executionInfo: ExecutionInfo, dc: Runner): PrepareActionResult + def prepareQuery(sql: String, prepare: Prepare = identityPrepare)( + executionInfo: ExecutionInfo, + dc: Runner + ): PrepareQueryResult + def prepareSingle(sql: String, prepare: Prepare = identityPrepare)( + executionInfo: ExecutionInfo, + dc: Runner + ): PrepareQueryResult + def prepareAction(sql: String, prepare: Prepare = identityPrepare)( + executionInfo: ExecutionInfo, + dc: Runner + ): PrepareActionResult def prepareBatchAction(groups: List[BatchGroup])(executionInfo: ExecutionInfo, dc: Runner): PrepareBatchActionResult // Summon an implicit execution context if needed (e.g. 
in Jasync contexts) inline def _summonPrepareRunner() = DatasourceContextInjectionMacro[RunnerBehavior, Runner, this.type](context) // Must be lazy since idiom/naming are null (in some contexts) initially due to initialization order - private lazy val make = ContextOperation.Factory[Dialect, Naming, PrepareRow, ResultRow, Session, this.type](self.idiom, self.naming) + private lazy val make = + ContextOperation.Factory[Dialect, Naming, PrepareRow, ResultRow, Session, this.type](self.idiom, self.naming) @targetName("runPrepareQuery") inline def prepare[T](inline quoted: Quoted[Query[T]]): PrepareQueryResult = { @@ -86,7 +96,9 @@ trait ContextVerbPrepare[+Dialect <: Idiom, +Naming <: NamingStrategy] { } @targetName("runPrepareBatchAction") - inline def prepare[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): PrepareBatchActionResult = { + inline def prepare[I, A <: Action[I] & QAC[I, Nothing]]( + inline quoted: Quoted[BatchAction[A]] + ): PrepareBatchActionResult = { val ca = make.batch[I, Nothing, A, PrepareBatchActionResult] { arg => val groups = arg.groups.map((sql, prepare) => BatchGroup(sql, prepare)) self.prepareBatchAction(groups.toList)(arg.executionInfo, _summonPrepareRunner()) diff --git a/quill-sql/src/main/scala/io/getquill/context/ContextVerbPrepareLambda.scala b/quill-sql/src/main/scala/io/getquill/context/ContextVerbPrepareLambda.scala index 2955b8095..4280d2286 100644 --- a/quill-sql/src/main/scala/io/getquill/context/ContextVerbPrepareLambda.scala +++ b/quill-sql/src/main/scala/io/getquill/context/ContextVerbPrepareLambda.scala @@ -5,7 +5,7 @@ import scala.language.experimental.macros import java.io.Closeable import scala.compiletime.summonFrom import scala.util.Try -import io.getquill.{ReturnAction} +import io.getquill.ReturnAction import io.getquill.generic.EncodingDsl import io.getquill.Quoted import io.getquill.QueryMeta @@ -44,10 +44,11 @@ import io.getquill.metaprog.etc.ColumnsFlicer import io.getquill.context.Execution.ElaborationBehavior import io.getquill.OuterSelectWrap -trait ContextVerbPrepareLambda[+Dialect <: Idiom, +Naming <: NamingStrategy] extends ContextVerbPrepare[Dialect, Naming] { +trait ContextVerbPrepareLambda[+Dialect <: Idiom, +Naming <: NamingStrategy] + extends ContextVerbPrepare[Dialect, Naming] { self: Context[Dialect, Naming] => - type PrepareQueryResult = Session => Result[PrepareRow] - type PrepareActionResult = Session => Result[PrepareRow] + type PrepareQueryResult = Session => Result[PrepareRow] + type PrepareActionResult = Session => Result[PrepareRow] type PrepareBatchActionResult = Session => Result[List[PrepareRow]] } // end ContextVerbPrepareLambda diff --git a/quill-sql/src/main/scala/io/getquill/context/ContextVerbStream.scala b/quill-sql/src/main/scala/io/getquill/context/ContextVerbStream.scala index ab73d6421..03c114f6f 100644 --- a/quill-sql/src/main/scala/io/getquill/context/ContextVerbStream.scala +++ b/quill-sql/src/main/scala/io/getquill/context/ContextVerbStream.scala @@ -5,7 +5,7 @@ import scala.language.experimental.macros import java.io.Closeable import scala.compiletime.summonFrom import scala.util.Try -import io.getquill.{ReturnAction} +import io.getquill.ReturnAction import io.getquill.generic.EncodingDsl import io.getquill.Quoted import io.getquill.QueryMeta @@ -44,22 +44,28 @@ import io.getquill.metaprog.etc.ColumnsFlicer import io.getquill.context.Execution.ElaborationBehavior import io.getquill.OuterSelectWrap -trait ContextVerbStream[+Dialect <: io.getquill.idiom.Idiom, +Naming <: 
NamingStrategy] extends ProtoStreamContext[Dialect, Naming] { +trait ContextVerbStream[+Dialect <: io.getquill.idiom.Idiom, +Naming <: NamingStrategy] + extends ProtoStreamContext[Dialect, Naming] { self: Context[Dialect, Naming] => // Must be lazy since idiom/naming are null (in some contexts) initially due to initialization order - private lazy val make = ContextOperation.Factory[Dialect, Naming, PrepareRow, ResultRow, Session, this.type](self.idiom, self.naming) + private lazy val make = + ContextOperation.Factory[Dialect, Naming, PrepareRow, ResultRow, Session, this.type](self.idiom, self.naming) @targetName("streamQuery") inline def stream[T](inline quoted: Quoted[Query[T]]): StreamResult[T] = _streamInternal[T](quoted, None) @targetName("streamQueryWithFetchSize") - inline def stream[T](inline quoted: Quoted[Query[T]], fetchSize: Int): StreamResult[T] = _streamInternal[T](quoted, Some(fetchSize)) + inline def stream[T](inline quoted: Quoted[Query[T]], fetchSize: Int): StreamResult[T] = + _streamInternal[T](quoted, Some(fetchSize)) /** Internal API that cannot be made private due to how inline functions work */ inline def _streamInternal[T](inline quoted: Quoted[Query[T]], fetchSize: Option[Int]): StreamResult[T] = { val ca = make.op[Nothing, T, StreamResult[T]] { arg => val simpleExt = arg.extractor.requireSimple() - self.streamQuery(arg.fetchSize, arg.sql, arg.prepare, simpleExt.extract)(arg.executionInfo, InternalApi._summonRunner()) + self.streamQuery(arg.fetchSize, arg.sql, arg.prepare, simpleExt.extract)( + arg.executionInfo, + InternalApi._summonRunner() + ) } QueryExecution.apply(ca)(quoted, fetchSize) } diff --git a/quill-sql/src/main/scala/io/getquill/context/ContextVerbTranslate.scala b/quill-sql/src/main/scala/io/getquill/context/ContextVerbTranslate.scala index eb88865f8..c9d23b063 100644 --- a/quill-sql/src/main/scala/io/getquill/context/ContextVerbTranslate.scala +++ b/quill-sql/src/main/scala/io/getquill/context/ContextVerbTranslate.scala @@ -5,7 +5,7 @@ import scala.language.experimental.macros import java.io.Closeable import scala.compiletime.summonFrom import scala.util.Try -import io.getquill.{ReturnAction} +import io.getquill.ReturnAction import io.getquill.generic.EncodingDsl import io.getquill.Quoted import io.getquill.QueryMeta @@ -49,9 +49,9 @@ trait ContextVerbTranslate[+Dialect <: Idiom, +Naming <: NamingStrategy] extends ContextTranslateMacro[Dialect, Naming] { self: Context[Dialect, Naming] => override type TranslateResult[T] = T - override def wrap[T](t: => T): T = t + override def wrap[T](t: => T): T = t override def push[A, B](result: A)(f: A => B): B = f(result) - override def seq[A](list: List[A]): List[A] = list + override def seq[A](list: List[A]): List[A] = list } trait ContextTranslateMacro[+Dialect <: Idiom, +Naming <: NamingStrategy] @@ -74,10 +74,12 @@ trait ContextTranslateMacro[+Dialect <: Idiom, +Naming <: NamingStrategy] def translateContext: TranslateRunner /** Internal API that cannot be made private due to how inline functions work */ - inline def _summonTranslateRunner() = DatasourceContextInjectionMacro[RunnerBehavior, TranslateRunner, this.type](translateContext) + inline def _summonTranslateRunner() = + DatasourceContextInjectionMacro[RunnerBehavior, TranslateRunner, this.type](translateContext) // Must be lazy since idiom/naming are null (in some contexts) initially due to initialization order - private lazy 
val make = + ContextOperation.Factory[Dialect, Naming, PrepareRow, ResultRow, Session, this.type](self.idiom, self.naming) @targetName("translateQuery") inline def translate[T](inline quoted: Quoted[Query[T]]): TranslateResult[String] = translate(quoted, false) @@ -85,7 +87,10 @@ trait ContextTranslateMacro[+Dialect <: Idiom, +Naming <: NamingStrategy] inline def translate[T](inline quoted: Quoted[Query[T]], inline prettyPrint: Boolean): TranslateResult[String] = { val ca = make.op[Nothing, T, TranslateResult[String]] { arg => val simpleExt = arg.extractor.requireSimple() - self.translateQueryEndpoint(arg.sql, arg.prepare, simpleExt.extract, prettyPrint)(arg.executionInfo, _summonTranslateRunner()) + self.translateQueryEndpoint(arg.sql, arg.prepare, simpleExt.extract, prettyPrint)( + arg.executionInfo, + _summonTranslateRunner() + ) } QueryExecution.apply(ca)(quoted, None) } @@ -96,7 +101,10 @@ trait ContextTranslateMacro[+Dialect <: Idiom, +Naming <: NamingStrategy] inline def translate[T](inline quoted: Quoted[T], inline prettyPrint: Boolean): TranslateResult[String] = { val ca = make.op[Nothing, T, TranslateResult[String]] { arg => val simpleExt = arg.extractor.requireSimple() - self.translateQueryEndpoint(arg.sql, arg.prepare, simpleExt.extract, prettyPrint)(arg.executionInfo, _summonTranslateRunner()) + self.translateQueryEndpoint(arg.sql, arg.prepare, simpleExt.extract, prettyPrint)( + arg.executionInfo, + _summonTranslateRunner() + ) } QueryExecution.apply(ca)(QuerySingleAsQuery(quoted), None) } @@ -106,26 +114,41 @@ trait ContextTranslateMacro[+Dialect <: Idiom, +Naming <: NamingStrategy] @targetName("translateAction") inline def translate[E](inline quoted: Quoted[Action[E]], inline prettyPrint: Boolean): TranslateResult[String] = { val ca = make.op[E, Any, TranslateResult[String]] { arg => - self.translateQueryEndpoint(arg.sql, arg.prepare, prettyPrint = prettyPrint)(arg.executionInfo, _summonTranslateRunner()) + self.translateQueryEndpoint(arg.sql, arg.prepare, prettyPrint = prettyPrint)( + arg.executionInfo, + _summonTranslateRunner() + ) } QueryExecution.apply(ca)(quoted, None) } @targetName("translateActionReturning") - inline def translate[E, T](inline quoted: Quoted[ActionReturning[E, T]]): TranslateResult[String] = translate(quoted, false) + inline def translate[E, T](inline quoted: Quoted[ActionReturning[E, T]]): TranslateResult[String] = + translate(quoted, false) @targetName("translateActionReturning") - inline def translate[E, T](inline quoted: Quoted[ActionReturning[E, T]], inline prettyPrint: Boolean): TranslateResult[String] = { + inline def translate[E, T]( + inline quoted: Quoted[ActionReturning[E, T]], + inline prettyPrint: Boolean + ): TranslateResult[String] = { val ca = make.op[E, T, TranslateResult[String]] { arg => val returningExt = arg.extractor.requireReturning() - self.translateQueryEndpoint(arg.sql, arg.prepare, returningExt.extract, prettyPrint)(arg.executionInfo, _summonTranslateRunner()) + self.translateQueryEndpoint(arg.sql, arg.prepare, returningExt.extract, prettyPrint)( + arg.executionInfo, + _summonTranslateRunner() + ) } QueryExecution.apply(ca)(quoted, None) } @targetName("translateBatchAction") - inline def translate[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]]): TranslateResult[List[String]] = translate(quoted, false) + inline def translate[I, A <: Action[I] & QAC[I, Nothing]]( + inline quoted: Quoted[BatchAction[A]] + ): TranslateResult[List[String]] = translate(quoted, false) @targetName("translateBatchAction") 
- inline def translate[I, A <: Action[I] & QAC[I, Nothing]](inline quoted: Quoted[BatchAction[A]], inline prettyPrint: Boolean): TranslateResult[List[String]] = { + inline def translate[I, A <: Action[I] & QAC[I, Nothing]]( + inline quoted: Quoted[BatchAction[A]], + inline prettyPrint: Boolean + ): TranslateResult[List[String]] = { val ca = make.batch[I, Nothing, A, TranslateResult[List[String]]] { arg => // Supporting only one top-level query batch group. Don't know if there are use-cases for multiple queries. val groups = arg.groups.map((sql, prepare) => BatchGroup(sql, prepare)) @@ -135,9 +158,14 @@ trait ContextTranslateMacro[+Dialect <: Idiom, +Naming <: NamingStrategy] } @targetName("translateBatchActionReturning") - inline def translate[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]]): TranslateResult[List[String]] = translate(quoted, false) + inline def translate[I, T, A <: Action[I] & QAC[I, T]]( + inline quoted: Quoted[BatchAction[A]] + ): TranslateResult[List[String]] = translate(quoted, false) @targetName("translateBatchActionReturning") - inline def translate[I, T, A <: Action[I] & QAC[I, T]](inline quoted: Quoted[BatchAction[A]], inline prettyPrint: Boolean): TranslateResult[List[String]] = { + inline def translate[I, T, A <: Action[I] & QAC[I, T]]( + inline quoted: Quoted[BatchAction[A]], + inline prettyPrint: Boolean + ): TranslateResult[List[String]] = { val ca = make.batch[I, T, A, TranslateResult[List[String]]] { arg => val returningExt = arg.extractor.requireReturning() // Supporting only one top-level query batch group. Don't know if there are use-cases for multiple queries. @@ -158,12 +186,17 @@ trait ContextTranslateProto[+Dialect <: Idiom, +Naming <: NamingStrategy] { def push[A, B](result: TranslateResult[A])(f: A => B): TranslateResult[B] def seq[A](list: List[TranslateResult[A]]): TranslateResult[List[A]] - def translateQueryEndpoint[T](statement: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor, prettyPrint: Boolean = false)(executionInfo: ExecutionInfo, dc: TranslateRunner): TranslateResult[String] = + def translateQueryEndpoint[T]( + statement: String, + prepare: Prepare = identityPrepare, + extractor: Extractor[T] = identityExtractor, + prettyPrint: Boolean = false + )(executionInfo: ExecutionInfo, dc: TranslateRunner): TranslateResult[String] = push(prepareParams(statement, prepare)) { params => val query = if (params.nonEmpty) { - params.foldLeft(statement) { - case (expanded, param) => expanded.replaceFirst("\\?", param) + params.foldLeft(statement) { case (expanded, param) => + expanded.replaceFirst("\\?", param) } } else { statement @@ -175,7 +208,10 @@ trait ContextTranslateProto[+Dialect <: Idiom, +Naming <: NamingStrategy] { query } - def translateBatchQueryEndpoint(groups: List[BatchGroup], prettyPrint: Boolean = false)(executionInfo: ExecutionInfo, dc: TranslateRunner): TranslateResult[List[String]] = + def translateBatchQueryEndpoint( + groups: List[BatchGroup], + prettyPrint: Boolean = false + )(executionInfo: ExecutionInfo, dc: TranslateRunner): TranslateResult[List[String]] = seq { groups.flatMap { group => group.prepare.map { prepare => @@ -184,7 +220,10 @@ trait ContextTranslateProto[+Dialect <: Idiom, +Naming <: NamingStrategy] { } } - def translateBatchQueryReturningEndpoint(groups: List[BatchGroupReturning], prettyPrint: Boolean = false)(executionInfo: ExecutionInfo, dc: TranslateRunner): TranslateResult[List[String]] = + def translateBatchQueryReturningEndpoint( + groups: 
List[BatchGroupReturning], + prettyPrint: Boolean = false + )(executionInfo: ExecutionInfo, dc: TranslateRunner): TranslateResult[List[String]] = seq { groups.flatMap { group => group.prepare.map { prepare => diff --git a/quill-sql/src/main/scala/io/getquill/context/DatasourceContextInjectionMacro.scala b/quill-sql/src/main/scala/io/getquill/context/DatasourceContextInjectionMacro.scala index f22835823..a3f266e99 100644 --- a/quill-sql/src/main/scala/io/getquill/context/DatasourceContextInjectionMacro.scala +++ b/quill-sql/src/main/scala/io/getquill/context/DatasourceContextInjectionMacro.scala @@ -7,7 +7,9 @@ object DatasourceContextInjectionMacro { inline def apply[DCI <: RunnerSummoningBehavior, Runner, Ctx](inline memberDc: Runner): Runner = ${ applyImpl[DCI, Runner, Ctx]('memberDc) } - def applyImpl[DCI <: RunnerSummoningBehavior: Type, Runner: Type, Ctx: Type](memberDc: Expr[Runner])(using quotes: Quotes): Expr[Runner] = { + def applyImpl[DCI <: RunnerSummoningBehavior: Type, Runner: Type, Ctx: Type]( + memberDc: Expr[Runner] + )(using quotes: Quotes): Expr[Runner] = { import quotes.reflect._ val dciType = TypeRepr.of[DCI] if (dciType <:< TypeRepr.of[RunnerSummoningBehavior.Implicit]) diff --git a/quill-sql/src/main/scala/io/getquill/context/InsertUpdateMacro.scala b/quill-sql/src/main/scala/io/getquill/context/InsertUpdateMacro.scala index a18694a6d..a04100e63 100644 --- a/quill-sql/src/main/scala/io/getquill/context/InsertUpdateMacro.scala +++ b/quill-sql/src/main/scala/io/getquill/context/InsertUpdateMacro.scala @@ -34,56 +34,53 @@ import java.util.UUID import io.getquill.metaprog.Extractors /** - * TODO Right now this is just insert but we can easily extend to update and delete + * TODO Right now this is just insert but we can easily extend to update and + * delete * - * The function call that regularly drives query insertion is - * {code} - * query[T].insert(_.field1 -> value, _.field2 -> value, etc...) - * {code} - * Let's call this the field-insertion api. - * - * This macro essentially takes an insert of the form `query[T].insert(T(...))` and converts into the former form. + * The function call that regularly drives query insertion is {code} + * query[T].insert(_.field1 -> value, _.field2 -> value, etc...) {code} Let's + * call this the field-insertion API. * - * Once we've parsed an insert e.g. `query[Person]insertValue(Person("Joe", "Bloggs"))` we then need to synthesize - * the insertions that this would represent e.g. `query[Person].insert(_.firstName -> "Joe", _.lastName -> "Bloggs")` + * This macro essentially takes an insert of the form `query[T].insert(T(...))` + * and converts it into the former form. * - * Each function of field-insertion API basically takes the form - * {code} (v) => vAssignmentProperty -> assignmentValue (on the AST) {code} + * Once we've parsed an insert e.g. `query[Person].insertValue(Person("Joe", + * "Bloggs"))` we then need to synthesize the insertions that this would + * represent e.g. 
`query[Person].insert(_.firstName -> "Joe", _.lastName -> + * "Bloggs")` * - * Let's take a look at a slighly more complex example - * Given: - * {code} - * case class Person(name: String, age: Option[Age]); Age(value: Int) - * quote { query[Person].insert(Person("Joe", Age(345))) } - * {code} + * Each function of field-insertion API basically takes the form {code} (v) => + * vAssignmentProperty -> assignmentValue (on the AST) {code} * - * This expands out into a series of statements which will be parsed to AST assignments - * This: `(v: Person) => v.name -> (v:Person).name` - * Will be parsed into this: - * {code} Assignment(Id(v), Prop(Id(v), name), Constant("Joe")) {code} + * Let's take a look at a slightly more complex example Given: {code} case class + * Person(name: String, age: Option[Age]); Age(value: Int) quote { + * query[Person].insert(Person("Joe", Age(345))) } {code} * - * This: `(v: Person) => v.age.map(v => v.value) -> Option(v:Age).map(v => v.value)` - * Will be parsed into this: - * {code} - * Assignment(Id(v), - * OptionTableMap(Prop(Id(v), age), Id(v), Prop(Id(v), value)) - * OptionTableMap(OptionApply(CaseClass(value=345)), Id(v), Prop(Id(v), value)) - * ) + * This expands out into a series of statements which will be parsed to AST + * assignments This: `(v: Person) => v.name -> (v:Person).name` Will be parsed + * into this: {code} Assignment(Id(v), Prop(Id(v), name), Constant("Joe")) * {code} * - * The end result of this synthesis is a series of assignments for an insert for the given entity. + * This: `(v: Person) => v.age.map(v => v.value) -> Option(v:Age).map(v => + * v.value)` Will be parsed into this: {code} Assignment(Id(v), + * OptionTableMap(Prop(Id(v), age), Id(v), Prop(Id(v), value)) + * OptionTableMap(OptionApply(CaseClass(value=345)), Id(v), Prop(Id(v), value)) + * ) {code} * - * Another possiblity is that the entity is lifted: - * {code} - * case class Person(name: String, age: Option[Age]); Age(value: Int) - * quote { query[Person].insertValue(lift(Person("Joe", Age(345)))) } - * {code} - * TODO Finish doc + * The end result of this synthesis is a series of assignments for an insert for + * the given entity. + * + * Another possibility is that the entity is lifted: {code} case class + * Person(name: String, age: Option[Age]); Age(value: Int) quote { + * query[Person].insertValue(lift(Person("Joe", Age(345)))) } {code} TODO Finish + * doc * - * Note that as a result of the way this is implemented, if either the InsertMeta or the SchemaMeta is not - * inline, the entire resulting query will not be inline since they both will be summoned and used in the - * resulting expressions. It might be useful to introduce a configuration parameter to ignore non-inline InsertMetas - * or non-inline SchemaMetas. Or maybe this could even be an annotation. + * Note that as a result of the way this is implemented, if either the + * InsertMeta or the SchemaMeta is not inline, the entire resulting query will + * not be inline since they both will be summoned and used in the resulting + * expressions. It might be useful to introduce a configuration parameter to + * ignore non-inline InsertMetas or non-inline SchemaMetas. Or maybe this could + * even be an annotation. 
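+ * + * As a purely illustrative sketch (derived from the Age example above, not a + * new API), the synthesis just described is equivalent to writing the + * field-insertion form by hand: + * {code} + * query[Person].insert( + *   _.name -> "Joe", + *   _.age.map(v => v.value) -> Option(Age(345)).map(v => v.value) + * ) + * {code}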
*/ object InsertUpdateMacro { // Using this ident since it is replaced in cleanIdent so user will not create something conflicting with it @@ -99,14 +96,17 @@ object InsertUpdateMacro { def retrieveAssignmentTuple(quoted: Quoted[_]): Set[Ast] = quoted.ast match { case Tuple(values) if (values.forall(_.isInstanceOf[Property])) => values.toSet - case other => throw new IllegalArgumentException(s"Invalid values in InsertMeta: ${other}. An InsertMeta AST must be a tuple of Property elements.") + case other => + throw new IllegalArgumentException( + s"Invalid values in InsertMeta: ${other}. An InsertMeta AST must be a tuple of Property elements." + ) } } // Summon state of a schemaMeta (i.e. whether an implicit one could be summoned and whether it is static (i.e. can produce a compile-time query or dynamic)) enum EntitySummonState[+T] { case Static(value: T, lifts: List[Expr[Planter[?, ?, ?]]]) extends EntitySummonState[T] - case Dynamic(uid: String, quotation: Expr[Quoted[Any]]) extends EntitySummonState[Nothing] + case Dynamic(uid: String, quotation: Expr[Quoted[Any]]) extends EntitySummonState[Nothing] def print(using Quotes): String = this match { case Static(value, lifts) => @@ -118,19 +118,22 @@ object InsertUpdateMacro { // Summon state of a updateMeta/insertMeta that indicates which columns to ignore (i.e. whether an implicit one could be summoned and whether it is static (i.e. can produce a compile-time query or dynamic)) enum IgnoresSummonState[+T] { - case Static(value: T) extends IgnoresSummonState[T] + case Static(value: T) extends IgnoresSummonState[T] case Dynamic(quotation: Expr[Quoted[Any]]) extends IgnoresSummonState[Nothing] } /** - * Perform the pipeline of creating an insert statement. The 'insertee' is the case class on which the SQL insert - * statement is based. The schema is based on the EntityQuery which could potentially be an unquoted QuerySchema. + * Perform the pipeline of creating an insert statement. The 'insertee' is the + * case class on which the SQL insert statement is based. The schema is based + * on the EntityQuery which could potentially be an unquoted QuerySchema. */ - class Pipeline[T: Type, A[T] <: Insert[T] | Update[T]: Type](isStatic: Boolean)(using Quotes) extends QuatMaking with QuatMakingBase { + class Pipeline[T: Type, A[T] <: Insert[T] | Update[T]: Type](isStatic: Boolean)(using Quotes) + extends QuatMaking + with QuatMakingBase { import quotes.reflect._ import io.getquill.util.Messages.qprint given TranspileConfig = SummonTranspileConfig() - val parser = SummonParser().assemble + val parser = SummonParser().assemble case class InserteeSchema(schemaRaw: Expr[EntityQuery[T]]) { private def plainEntity: Entity = { @@ -172,7 +175,9 @@ object InsertUpdateMacro { ) EntitySummonState.Dynamic(uid, quotation) case _ => - report.throwError(s"Quotation Lot of Insert/UpdateMeta must be either pluckable or uprootable from: '${unquotation}'") + report.throwError( + s"Quotation Lot of Insert/UpdateMeta must be either pluckable or uprootable from: '${unquotation}'" + ) } // Case where it's not just an EntityQuery that is in the front of the update/insertValue e.g. a filter @@ -185,7 +190,7 @@ object InsertUpdateMacro { // quote { query[Person].filter(...lift(runtimeValue)...).update/insertValue(...) } // so these lifts need to be extracted. 
case scheme @ '{ ($q: EntityQuery[t]) } => - val ast = parser(q) + val ast = parser(q) val (rawLifts, runtimeLifts) = ExtractLifts(q) if (!runtimeLifts.isEmpty) { // In this particular case: @@ -207,10 +212,14 @@ object InsertUpdateMacro { // We Create => |Quoted( Filter(QuoteTag(uid:111), u, ...ScalarTag(uid:222)...), EagerLift(uid:222,...), QuotationVase(uid:111, $v:query[Person]) ) val uid = UUID.randomUUID().toString() if (isStatic) - report.warning(s"A non-inlined expression (that defines a query for ${Format.TypeRepr(schemaRaw.asTerm.tpe.widen)}) is forcing the query to become dynamic. Try to change its variable to inline in order to fix the issue.") - EntitySummonState.Dynamic(uid, '{ Quoted(${ Lifter(ast) }, ${ Expr.ofList(rawLifts) }, ${ Expr.ofList(runtimeLifts) }) }) - } - else + report.warning( + s"A non-inlined expression (that defines a query for ${Format.TypeRepr(schemaRaw.asTerm.tpe.widen)}) is forcing the query to become dynamic. Try to change its variable to inline in order to fix the issue." + ) + EntitySummonState.Dynamic( + uid, + '{ Quoted(${ Lifter(ast) }, ${ Expr.ofList(rawLifts) }, ${ Expr.ofList(runtimeLifts) }) } + ) + } else EntitySummonState.Static(ast, rawLifts) case _ => @@ -231,7 +240,9 @@ object InsertUpdateMacro { else if (TypeRepr.of[A] <:< TypeRepr.of[Update]) MacroType.Update else - report.throwError(s"Invalid macro action type ${io.getquill.util.Format.TypeOf[A[Any]]} must be either Insert or Update") + report.throwError( + s"Invalid macro action type ${io.getquill.util.Format.TypeOf[A[Any]]} must be either Insert or Update" + ) def summonMetaOfThis() = ofThis() match { case MacroType.Insert => Expr.summon[InsertMeta[T]] @@ -259,11 +270,17 @@ object InsertUpdateMacro { case Tuple(values) if (values.forall(_.isInstanceOf[Property])) => IgnoresSummonState.Static(values.toSet) case other => - report.throwError(s"Invalid values in ${Format.TypeRepr(actionMeta.asTerm.tpe)}: ${other}. An ${Format.TypeRepr(actionMeta.asTerm.tpe)} AST must be a tuple of Property elements.") + report.throwError( + s"Invalid values in ${Format.TypeRepr(actionMeta.asTerm.tpe)}: ${other}. An ${Format + .TypeRepr(actionMeta.asTerm.tpe)} AST must be a tuple of Property elements." + ) } // if the meta is not inline case meta: Expr[InsertMeta[T] | UpdateMeta[T]] => - if (isStatic) report.warning(s"The non-inlined variable `${Format.Expr(actionMeta)}:${Format.TypeRepr(actionMeta.asTerm.tpe.widen)}` will force the query to be dynamic. Try to change it to inline in order to fix the issue.") + if (isStatic) + report.warning( + s"The non-inlined variable `${Format.Expr(actionMeta)}:${Format.TypeRepr(actionMeta.asTerm.tpe.widen)}` will force the query to be dynamic. Try to change it to inline in order to fix the issue." 
+ ) IgnoresSummonState.Dynamic('{ InsertUpdateMacro.getQuotation($meta) }) case null => report.throwError( @@ -278,22 +295,18 @@ object InsertUpdateMacro { } /** - * Inserted object - * can either be static: query[Person]insertValue(Person("Joe", "Bloggs")) - * or it can be lifted: query[Person].insertValue(lift(Person("Joe", "Bloggs"))) + * Inserted object can either be static: + * query[Person].insertValue(Person("Joe", "Bloggs")) or it can be lifted: + * query[Person].insertValue(lift(Person("Joe", "Bloggs"))) * - * In the later case, it will become: - * {{ - * //Assuming x := Person("Joe", "Bloggs") - * CaseClassLift( - * Quoted(ast: CaseClass(name -> lift(idA)), ...), lifts: List(EagerLift(x.name, idA), ...)) - * ) - * }} + * In the latter case, it will become: {{ //Assuming x := Person("Joe", + * "Bloggs") CaseClassLift( Quoted(ast: CaseClass(name -> lift(idA)), ...), + * lifts: List(EagerLift(x.name, idA), ...)) ) }} * - * For batch queries liftQuery(people).foreach(p => query[Person].insertValue(p)) - * it will be just the ast Ident("p") + * For batch queries liftQuery(people).foreach(p => + * query[Person].insertValue(p)) it will be just the ast Ident("p") */ - def parseInsertee(insertee: Expr[Any]): CaseClass | AIdent = { + def parseInsertee(insertee: Expr[Any]): CaseClass | AIdent = insertee match { // The case: query[Person].insertValue(lift(Person("Joe", "Bloggs"))) case QuotationLotExpr(exprType) => @@ -303,48 +316,59 @@ object InsertUpdateMacro { case Uprootable.Ast(astExpr) => val ast = Unlifter(astExpr) if (!ast.isInstanceOf[CaseClass]) - report.throwError(s"The lifted insertion element needs to be parsed as a Ast CaseClass but it is: ${ast}") + report.throwError( + s"The lifted insertion element needs to be parsed as an Ast CaseClass but it is: ${ast}" + ) ast.asInstanceOf[CaseClass] case _ => - report.throwError(s"Cannot uproot lifted element. A lifted Insert element e.g. query[T].insertValue(lift(element)) must be lifted directly inside the lift clause. The elment was:\n${insertee.show}") + report.throwError( + s"Cannot uproot lifted element. A lifted Insert element e.g. query[T].insertValue(lift(element)) must be lifted directly inside the lift clause. The element was:\n${insertee.show}" + ) } // Otherwise the inserted element (i.e. the insertee) is static and should be parsed as an ordinary case class // i.e. the case query[Person]insertValue(Person("Joe", "Bloggs")) (or the batch case) case _ => parseStaticInsertee(insertee) } - } /** - * Parse the input to of query[Person]insertValue(Person("Joe", "Bloggs")) into CaseClass(firstName="Joe",lastName="Bloggs") + * Parse the input of query[Person].insertValue(Person("Joe", "Bloggs")) + * into CaseClass(firstName="Joe",lastName="Bloggs") */ def parseStaticInsertee(insertee: Expr[_]): CaseClass | AIdent = { val rawAst = parser(insertee) - val ast = BetaReduction(rawAst) + val ast = BetaReduction(rawAst) ast match { case cc: CaseClass => cc case id: AIdent => id - case _ => report.throwError(s"Parsed Insert Macro AST is not a Case Class: ${qprint(ast).plainText} (or a batch-query Ident)") + case _ => + report.throwError( + s"Parsed Insert Macro AST is not a Case Class: ${qprint(ast).plainText} (or a batch-query Ident)" + ) } } /** - * Actually the same as deduceAssignmentsFromCaseClass, but I decided to write - * a separate function and comment it extensively since the logic is not simple to - * extrapolate. - * This function creates a series of assignments - * of a elaborated product. 
However, each assignment just assigns to the identifier - * which will be plugged in (i.e. BetaReduced) once the Ident is actually substituted. - * E.g. if we have something like this: `val ip = quote { (p: Person) => query[Person].insertValue(p) }` - * and then later: `run(ip(lift(Person("Joe",123))))` then the assignments list is just based - * on the `p` identifier of the `ip` quoted function i.e: - * `(v:Person) => v.firstName -> p.firstName` this is achived by doing - * BetaReduce(v.firstName, v -> p). Later on when `ip(lift(Person("Joe",123)))` - * happens the `CaseClass(firstName -> lift(...), age -> lift(...))` comes in and - * all the right values are plugged in correctly. + * Actually the same as deduceAssignmentsFromCaseClass, but I decided to + * write a separate function and comment it extensively since the logic is + * not simple to extrapolate. This function creates a series of assignments + * of an elaborated product. However, each assignment just assigns to the + * identifier which will be plugged in (i.e. BetaReduced) once the Ident is + * actually substituted. E.g. if we have something like this: `val ip = + * quote { (p: Person) => query[Person].insertValue(p) }` and then later: + * `run(ip(lift(Person("Joe",123))))` then the assignments list is just + * based on the `p` identifier of the `ip` quoted function i.e.: `(v:Person) + * \=> v.firstName -> p.firstName` this is achieved by doing + * BetaReduce(v.firstName, v -> p). Later on when + * `ip(lift(Person("Joe",123)))` happens the `CaseClass(firstName -> + * lift(...), age -> lift(...))` comes in and all the right values are + * plugged in correctly. */ def deduceAssignmentsFromIdent(insertee: AIdent) = { - val expansionList = ElaborateStructure.ofProductType[T](VIdent.name, ElaborationSide.Encoding) // Elaboration side is Encoding since this is for an entity being inserted + val expansionList = ElaborateStructure.ofProductType[T]( + VIdent.name, + ElaborationSide.Encoding + ) // Elaboration side is Encoding since this is for an entity being inserted def mapping(path: Ast) = { val reduction = BetaReduction(path, VIdent -> insertee) Assignment(VIdent, path, reduction) } @@ -357,7 +381,10 @@ object InsertUpdateMacro { def deduceAssignmentsFromCaseClass(insertee: CaseClass) = { // Expand into a AST // T:Person(name:Str, age:Option[Age]) Age(value: Int) -> Ast: List(v.name, v.age.map(v => v.value)) - val expansionList = ElaborateStructure.ofProductType[T](VIdent.name, ElaborationSide.Encoding) // Elaboration side is Encoding since this is for an entity being inserted + val expansionList = ElaborateStructure.ofProductType[T]( + VIdent.name, + ElaborationSide.Encoding + ) // Elaboration side is Encoding since this is for an entity being inserted // Now synthesize (v) => vAssignmentProperty -> assignmentValue // e.g. 
(v:Person) => v.firstName -> "Joe" @@ -403,16 +430,20 @@ object InsertUpdateMacro { // Pull out the exclusions from the quotation val exclusions = '{ DynamicUtil.retrieveAssignmentTuple($quotation) } // Lift ALL the assignments of the entity - val allAssignmentsLifted = Expr.ofList(assignmentsOfEntity.map(ast => Lifter.NotSerializingAst.assignment(ast))) + val allAssignmentsLifted = + Expr.ofList(assignmentsOfEntity.map(ast => Lifter.NotSerializingAst.assignment(ast))) // Create a statement that represents the filtered assignments during runtime - val liftedFilteredAssignments = '{ $allAssignmentsLifted.filterNot(asi => $exclusions.contains(asi.property)) } + val liftedFilteredAssignments = '{ + $allAssignmentsLifted.filterNot(asi => $exclusions.contains(asi.property)) + } // ... and return the filtered assignments AssignmentList.Dynamic(liftedFilteredAssignments) } /** - * Note that the only reason Parser is needed here is to pass it into parseInsertee. - * The batch pipeline driven by createFromPremade currently doesn't need it. + * Note that the only reason Parser is needed here is to pass it into + * parseInsertee. The batch pipeline driven by createFromPremade currently + * doesn't need it. */ def apply(schemaRaw: Expr[EntityQuery[T]], inserteeRaw: Expr[T]) = { val insertee = inserteeRaw.asTerm.underlyingArgument.asExpr @@ -447,10 +478,17 @@ object InsertUpdateMacro { } /** - * Create a static or dynamic quotation based on the state. Wrap the expr using some additional functions if we need to. - * This is used for the createFromPremade if we need to wrap it into insertReturning which is used for batch-returning query execution. + * Create a static or dynamic quotation based on the state. Wrap the expr + * using some additional functions if we need to. This is used for the + * createFromPremade if we need to wrap it into insertReturning which is + * used for batch-returning query execution. */ - def createQuotation(summonState: EntitySummonState[Ast], assignmentOfEntity: List[Assignment], lifts: List[Expr[Planter[?, ?, ?]]], pluckedUnquotes: List[Expr[QuotationVase]]) = { + def createQuotation( + summonState: EntitySummonState[Ast], + assignmentOfEntity: List[Assignment], + lifts: List[Expr[Planter[?, ?, ?]]], + pluckedUnquotes: List[Expr[QuotationVase]] + ) = { // Processed Assignments AST plus any lifts that may have come from the assignments AST themsevles. // That is usually the case when val assignmentList = processAssignmentsAndExclusions(assignmentOfEntity) @@ -468,7 +506,13 @@ object InsertUpdateMacro { } // Now create the quote and lift the action. 
This is more efficient then the alternative because the whole action AST can be serialized - val quotation = '{ Quoted[A[T]](${ Lifter(action) }, ${ Expr.ofList(previousLifts ++ lifts) }, ${ Expr.ofList(pluckedUnquotes) }) } + val quotation = '{ + Quoted[A[T]]( + ${ Lifter(action) }, + ${ Expr.ofList(previousLifts ++ lifts) }, + ${ Expr.ofList(pluckedUnquotes) } + ) + } // Unquote the quotation and return quotation @@ -483,9 +527,15 @@ object InsertUpdateMacro { } // create and lift the action - val uid = UUID.randomUUID().toString() + val uid = UUID.randomUUID().toString() val runtimeQuote = '{ QuotationVase($actionQuote, ${ Expr(uid) }) } - val quotation = '{ Quoted[A[T]](QuotationTag(${ Expr(uid) }), ${ Expr.ofList(previousLifts ++ lifts) }, $runtimeQuote +: ${ Expr.ofList(pluckedUnquotes) }) } + val quotation = '{ + Quoted[A[T]]( + QuotationTag(${ Expr(uid) }), + ${ Expr.ofList(previousLifts ++ lifts) }, + $runtimeQuote +: ${ Expr.ofList(pluckedUnquotes) } + ) + } quotation // If we get a dynamic entity back we need to splice things as an Expr even if the assignmentsList is know at compile time @@ -503,7 +553,9 @@ object InsertUpdateMacro { // Create the QuotationVase in which this dynamic quotation will go val runtimeQuote = '{ QuotationVase($entityQuotation, ${ Expr(uid) }) } // Then create the quotation, adding the new runtimeQuote to the list of pluckedUnquotes - val quotation = '{ Quoted[A[T]](${ action }, ${ Expr.ofList(lifts) }, $runtimeQuote +: ${ Expr.ofList(pluckedUnquotes) }) } + val quotation = '{ + Quoted[A[T]](${ action }, ${ Expr.ofList(lifts) }, $runtimeQuote +: ${ Expr.ofList(pluckedUnquotes) }) + } // Unquote the quotation and return quotation } @@ -511,8 +563,12 @@ object InsertUpdateMacro { } // end Pipeline - def static[T: Type, A[T] <: Insert[T] | Update[T]: Type](entityRaw: Expr[EntityQuery[T]], bodyRaw: Expr[T])(using Quotes): Expr[A[T]] = + def static[T: Type, A[T] <: Insert[T] | Update[T]: Type](entityRaw: Expr[EntityQuery[T]], bodyRaw: Expr[T])(using + Quotes + ): Expr[A[T]] = new Pipeline[T, A](true).apply(entityRaw, bodyRaw) - def dynamic[T: Type, A[T] <: Insert[T] | Update[T]: Type](entityRaw: Expr[EntityQuery[T]], bodyRaw: Expr[T])(using Quotes): Expr[A[T]] = + def dynamic[T: Type, A[T] <: Insert[T] | Update[T]: Type](entityRaw: Expr[EntityQuery[T]], bodyRaw: Expr[T])(using + Quotes + ): Expr[A[T]] = new Pipeline[T, A](false).apply(entityRaw, bodyRaw) } diff --git a/quill-sql/src/main/scala/io/getquill/context/InsertUpdateMetaMacro.scala b/quill-sql/src/main/scala/io/getquill/context/InsertUpdateMetaMacro.scala index 33b83c723..f02521b64 100644 --- a/quill-sql/src/main/scala/io/getquill/context/InsertUpdateMetaMacro.scala +++ b/quill-sql/src/main/scala/io/getquill/context/InsertUpdateMetaMacro.scala @@ -24,7 +24,7 @@ import io.getquill.norm.TranspileConfig object MetaMacro { def apply[T: Type](excludesRaw: Expr[Seq[(T => Any)]])(using Quotes): (Tuple, Expr[String]) = { - val parser = SummonParser().assemble + val parser = SummonParser().assemble given TranspileConfig = SummonTranspileConfig() // Pull out individual args from the apply @@ -43,15 +43,17 @@ object MetaMacro { case Function(List(param), prop @ Property(_, _)) => BetaReduction(prop, param -> InsertUpdateMacro.VIdent) case other => - quotes.reflect.report.throwError(s"Could not recognize insert exclusion AST: ${other} as a valid exclusion AST") + quotes.reflect.report.throwError( + s"Could not recognize insert exclusion AST: ${other} as a valid exclusion AST" + ) } // Shove those into a tuple 
and return that val excludeTuple = Tuple(excludeAstProps.toList) - val uuid = Expr(java.util.UUID.randomUUID().toString) + val uuid = Expr(java.util.UUID.randomUUID().toString) (excludeTuple, uuid) } // end apply -} // end MetaMacro +} // end MetaMacro object InsertMetaMacro { def apply[T: Type](excludesRaw: Expr[Seq[(T => Any)]])(using Quotes): Expr[InsertMeta[T]] = { diff --git a/quill-sql/src/main/scala/io/getquill/context/LiftMacro.scala b/quill-sql/src/main/scala/io/getquill/context/LiftMacro.scala index e8c63189a..92b00f1d9 100644 --- a/quill-sql/src/main/scala/io/getquill/context/LiftMacro.scala +++ b/quill-sql/src/main/scala/io/getquill/context/LiftMacro.scala @@ -5,7 +5,7 @@ import scala.language.experimental.macros import java.io.Closeable import scala.compiletime.summonFrom import scala.util.Try -import io.getquill.{ReturnAction} +import io.getquill.ReturnAction import io.getquill.generic.EncodingDsl import io.getquill.Quoted import io.getquill.QueryMeta @@ -42,7 +42,9 @@ import scala.reflect.ClassTag object LiftQueryMacro { private[getquill] def newUuid = java.util.UUID.randomUUID().toString - def apply[T: Type, U[_] <: Iterable[_]: Type, PrepareRow: Type, Session: Type](entity: Expr[U[T]])(using Quotes): Expr[Query[T]] = { + def apply[T: Type, U[_] <: Iterable[_]: Type, PrepareRow: Type, Session: Type]( + entity: Expr[U[T]] + )(using Quotes): Expr[Query[T]] = { import quotes.reflect._ // check if T is a case-class (e.g. mirrored entity) or a leaf, probably best way to do that val quat = QuatMaking.ofType[T] @@ -51,10 +53,17 @@ object LiftQueryMacro { // Not sure why cast back to iterable is needed here but U param is not needed once it is inside of the planter val (lifterClass, lifters) = LiftMacro.liftInjectedProduct[T, PrepareRow, Session] - val lifterClassExpr = Lifter.caseClass(lifterClass) + val lifterClassExpr = Lifter.caseClass(lifterClass) val liftedLiftersExpr = Expr.ofList(lifters) val returning = - '{ EagerEntitiesPlanter($entity.asInstanceOf[Iterable[T]], ${ Expr(newUuid) }, ${ liftedLiftersExpr }, ${ lifterClassExpr }).unquote } + '{ + EagerEntitiesPlanter( + $entity.asInstanceOf[Iterable[T]], + ${ Expr(newUuid) }, + ${ liftedLiftersExpr }, + ${ lifterClassExpr } + ).unquote + } returning case _ => val encoder = LiftMacro.summonEncoderOrFail[T, PrepareRow, Session](entity) @@ -66,7 +75,7 @@ object LiftQueryMacro { object LiftMacro { private[getquill] def newUuid = java.util.UUID.randomUUID().toString - private[getquill] val VIdent = AIdent("v", Quat.Generic) + private[getquill] val VIdent = AIdent("v", Quat.Generic) def apply[T: Type, PrepareRow: Type, Session: Type](entity: Expr[T])(using Quotes): Expr[T] = { import quotes.reflect._ @@ -84,8 +93,14 @@ object LiftMacro { // TODO Move this method to testing code since this method is only accessed by other macros in the source // TODO Use this functionality to test the internals of liftInjectedProduct i.e. 
see if the correct encoders are summoned - inline def liftInjectedProductExternal[T, PrepareRow]: List[(String, T => Any)] = ${ liftInjectedProductExternalImpl[T, PrepareRow] } - def liftInjectedProductExternalImpl[T, PrepareRow](using qctx: Quotes, tpe: Type[T], prepareRowTpe: Type[PrepareRow]): Expr[List[(String, T => Any)]] = + inline def liftInjectedProductExternal[T, PrepareRow]: List[(String, T => Any)] = ${ + liftInjectedProductExternalImpl[T, PrepareRow] + } + def liftInjectedProductExternalImpl[T, PrepareRow](using + qctx: Quotes, + tpe: Type[T], + prepareRowTpe: Type[PrepareRow] + ): Expr[List[(String, T => Any)]] = Expr.ofList { liftInjectedProductComponents[T, PrepareRow]._2.map { elem => '{ (${ Expr(elem._1) }, ${ elem._2 }) } @@ -93,10 +108,16 @@ object LiftMacro { } // TODO Injected => Injectable - private[getquill] def liftInjectedProduct[T, PrepareRow, Session](using qctx: Quotes, tpe: Type[T], prepareRowTpe: Type[PrepareRow], sessionTpe: Type[Session]): (CaseClass, List[Expr[InjectableEagerPlanter[_, PrepareRow, Session]]]) = { + private[getquill] def liftInjectedProduct[T, PrepareRow, Session](using + qctx: Quotes, + tpe: Type[T], + prepareRowTpe: Type[PrepareRow], + sessionTpe: Type[Session] + ): (CaseClass, List[Expr[InjectableEagerPlanter[_, PrepareRow, Session]]]) = { import qctx.reflect._ val (caseClassAstInitial, liftsInitial) = liftInjectedProductComponents[T, PrepareRow] - val TaggedLiftedCaseClass(caseClassAst, lifts) = TaggedLiftedCaseClass(caseClassAstInitial, liftsInitial).reKeyWithUids() + val TaggedLiftedCaseClass(caseClassAst, lifts) = + TaggedLiftedCaseClass(caseClassAstInitial, liftsInitial).reKeyWithUids() val liftPlanters = lifts.map { (liftKey, lift) => // since we don't have an implicit Type for every single lift, we need to pull out each of their TypeReprs convert them to Type and manually pass them in @@ -104,21 +125,28 @@ object LiftMacro { val liftType = lift.asTerm.tpe.widen.asType liftType match { case '[T => liftT] => - injectableLiftValue[liftT, PrepareRow, Session](lift.asExprOf[T => liftT], liftKey) // Note: if want to get this to work, try doing 'summon[Type[liftT]]' (using liftType, prepareRowTpe, quotes) + injectableLiftValue[liftT, PrepareRow, Session]( + lift.asExprOf[T => liftT], + liftKey + ) // Note: if want to get this to work, try doing 'summon[Type[liftT]]' (using liftType, prepareRowTpe, quotes) } } (caseClassAst, liftPlanters) } - private[getquill] def liftInjectedProductComponents[T, PrepareRow](using qctx: Quotes, tpe: Type[T], prepareRowTpe: Type[PrepareRow]): (CaseClass, List[(String, Expr[T => _])]) = { + private[getquill] def liftInjectedProductComponents[T, PrepareRow](using + qctx: Quotes, + tpe: Type[T], + prepareRowTpe: Type[PrepareRow] + ): (CaseClass, List[(String, Expr[T => _])]) = { import qctx.reflect._ import scala.quoted._ import io.getquill.util.Format // Get the elaboration and AST once so that it will not have to be parsed out of the liftedCombo (since they are normally returned by ElaborateStructure.ofProductValue) - val elaborated = ElaborateStructure.Term.ofProduct[T](ElaborationSide.Encoding) + val elaborated = ElaborateStructure.Term.ofProduct[T](ElaborationSide.Encoding) val (_, caseClassAst) = ElaborateStructure.productValueToAst[T](elaborated) - val caseClass = caseClassAst.asInstanceOf[io.getquill.ast.CaseClass] + val caseClass = caseClassAst.asInstanceOf[io.getquill.ast.CaseClass] // Get just the labels. 
We need to pass in an instance of T which we don't actually use // so it's fine to pass in anything but technically that is a bit inelegant. @@ -134,20 +162,22 @@ object LiftMacro { // ... // and the respectively pull out lift(singleArg.foo), lift(singleArg.bar), etc... from that clause turning it into // (singleArg) => lift(singleArg.foo), (singleArg) => lift(singleArg.bar), (singleArg) => etc... so that everything remains phase consistent - val liftLambdasAndTypes = ElaborateStructure.decomposedProductValue[T](ElaborationSide.Encoding) // Elaboration side is 'Encoding' since we are in the lift macro + val liftLambdasAndTypes = ElaborateStructure.decomposedProductValue[T]( + ElaborationSide.Encoding + ) // Elaboration side is 'Encoding' since we are in the lift macro val liftLambdas = liftLambdasAndTypes.map(_._1) - val exprTypes = liftLambdasAndTypes.map(_._2) + val exprTypes = liftLambdasAndTypes.map(_._2) def liftCombo[Output: Type](index: Int) = '{ (entity: T) => ${ liftLambdas(index).apply('entity) }.asInstanceOf[Output] } val output = - labels.zipWithIndex.map((label, index) => { + labels.zipWithIndex.map { (label, index) => exprTypes(index) match { case '[tt] => (label, liftCombo[tt](index)) } - }) + } (caseClass, output) @@ -162,10 +192,16 @@ object LiftMacro { // Lambda(Symbol.spliceOwner, mt, (owner,args) => body.changeOwner(owner)) } - private[getquill] def liftProduct[T, PrepareRow, Session](productEntity: Expr[T])(using qctx: Quotes, tpe: Type[T], prepareRowTpe: Type[PrepareRow], sessionTpe: Type[Session]): Expr[CaseClassLift[T]] = { + private[getquill] def liftProduct[T, PrepareRow, Session](productEntity: Expr[T])(using + qctx: Quotes, + tpe: Type[T], + prepareRowTpe: Type[PrepareRow], + sessionTpe: Type[Session] + ): Expr[CaseClassLift[T]] = { import qctx.reflect._ // Elaborate the entity and get it's lift. Since we are in the lifter, the elabration side is the encoding side (i.e. since lifts are doing Encoding). - val TaggedLiftedCaseClass(caseClassAst, lifts) = ElaborateStructure.ofProductValue[T](productEntity, ElaborationSide.Encoding).reKeyWithUids() + val TaggedLiftedCaseClass(caseClassAst, lifts) = + ElaborateStructure.ofProductValue[T](productEntity, ElaborationSide.Encoding).reKeyWithUids() val liftPlanters = lifts.map { (liftKey, lift) => // since we don't have an implicit Type for every single lift, we need to pull out each of their TypeReprs convert them to Type and manually pass them in @@ -173,38 +209,60 @@ object LiftMacro { val liftType = lift.asTerm.tpe.widen.asType liftType match { case '[liftT] => - liftValue[liftT, PrepareRow, Session](lift.asExprOf[liftT], liftKey) // Note: if want to get this to work, try doing 'summon[Type[liftT]]' (using liftType, prepareRowTpe, quotes) + liftValue[liftT, PrepareRow, Session]( + lift.asExprOf[liftT], + liftKey + ) // Note: if want to get this to work, try doing 'summon[Type[liftT]]' (using liftType, prepareRowTpe, quotes) } } val quotation = '{ Quoted[T](${ Lifter(caseClassAst) }, ${ Expr.ofList(liftPlanters) }, Nil) } - '{ CaseClassLift[T]($quotation, ${ Expr(java.util.UUID.randomUUID.toString) }) } // NOTE UUID technically not needed here. Can try to remove it later + '{ + CaseClassLift[T]($quotation, ${ Expr(java.util.UUID.randomUUID.toString) }) + } // NOTE UUID technically not needed here. 
Can try to remove it later } - private[getquill] def summonEncoderOrFail[T: Type, PrepareRow: Type, Session: Type](loggingEntity: Expr[_])(using Quotes) = { + private[getquill] def summonEncoderOrFail[T: Type, PrepareRow: Type, Session: Type]( + loggingEntity: Expr[_] + )(using Quotes) = { import quotes.reflect._ Expr.summon[GenericEncoder[T, PrepareRow, Session]] match { case Some(enc) => enc - case None => report.throwError(s"Cannot Find a '${Printer.TypeReprCode.show(TypeRepr.of[T])}' Encoder of ${Printer.TreeShortCode.show(loggingEntity.asTerm)}", loggingEntity) + case None => + report.throwError( + s"Cannot Find a '${Printer.TypeReprCode.show(TypeRepr.of[T])}' Encoder of ${Printer.TreeShortCode + .show(loggingEntity.asTerm)}", + loggingEntity + ) } } - private[getquill] def liftValue[T: Type, PrepareRow: Type, Session: Type](valueEntity: Expr[T], uuid: String = newUuid)(using Quotes) /*: Expr[EagerPlanter[T, PrepareRow]]*/ = { + private[getquill] def liftValue[T: Type, PrepareRow: Type, Session: Type]( + valueEntity: Expr[T], + uuid: String = newUuid + )(using Quotes) /*: Expr[EagerPlanter[T, PrepareRow]]*/ = { import quotes.reflect._ val encoder = summonEncoderOrFail[T, PrepareRow, Session](valueEntity) - '{ EagerPlanter($valueEntity, $encoder, ${ Expr(uuid) }) } // [T, PrepareRow] // adding these causes assertion failed: unresolved symbols: value Context_this + '{ + EagerPlanter($valueEntity, $encoder, ${ Expr(uuid) }) + } // [T, PrepareRow] // adding these causes assertion failed: unresolved symbols: value Context_this } - def valueOrString[T: Type, PrepareRow: Type, Session: Type](valueEntity: Expr[Any], uuid: String = newUuid)(using Quotes) = { + def valueOrString[T: Type, PrepareRow: Type, Session: Type](valueEntity: Expr[Any], uuid: String = newUuid)(using + Quotes + ) = { import quotes.reflect._ // i.e. the actual thing being passed to the encoder e.g. for lift(foo.bar) this will be "foo.bar" val fieldName = Format.Expr(valueEntity) // The thing being encoded converted to a string, unless it is null then null is returned val valueEntityToString = '{ StringOrNull($valueEntity) } - val nullableEncoder = summonEncoderOrFail[Option[T], PrepareRow, Session](valueEntity) + val nullableEncoder = summonEncoderOrFail[Option[T], PrepareRow, Session](valueEntity) val expectedClassTag = Expr.summon[ClassTag[T]] match { case Some(value) => value - case None => report.throwError(s"Cannot create a classTag for the type ${Format.TypeOf[T]} for the value ${fieldName}. Cannot create a string-fallback encoder.") + case None => + report.throwError( + s"Cannot create a classTag for the type ${Format.TypeOf[T]} for the value ${fieldName}. Cannot create a string-fallback encoder." 
+ ) } val converterExpr: Expr[Either[String, FromString[T]]] = StringCodec.FromString.summonExpr[T] match { @@ -220,23 +278,32 @@ object LiftMacro { } } - private[getquill] def injectableLiftValue[T: Type, PrepareRow: Type, Session: Type](valueEntity: Expr[_ => T], uuid: String = newUuid)(using Quotes) /*: Expr[EagerPlanter[T, PrepareRow]]*/ = { + private[getquill] def injectableLiftValue[T: Type, PrepareRow: Type, Session: Type]( + valueEntity: Expr[_ => T], + uuid: String = newUuid + )(using Quotes) /*: Expr[EagerPlanter[T, PrepareRow]]*/ = { import quotes.reflect._ val encoder = Expr.summon[GenericEncoder[T, PrepareRow, Session]] match { case Some(enc) => enc - case None => report.throwError( - s"Cannot inject the value: ${io.getquill.util.Format.Expr(valueEntity)}.Cannot Find a '${Printer.TypeReprCode.show(TypeRepr.of[T])}' Encoder of ${Printer.TreeShortCode.show(valueEntity.asTerm)}", + case None => + report.throwError( + s"Cannot inject the value: ${io.getquill.util.Format.Expr(valueEntity)}.Cannot Find a '${Printer.TypeReprCode + .show(TypeRepr.of[T])}' Encoder of ${Printer.TreeShortCode.show(valueEntity.asTerm)}", valueEntity ) } - '{ InjectableEagerPlanter($valueEntity, $encoder, ${ Expr(uuid) }) } // [T, PrepareRow] // adding these causes assertion failed: unresolved symbols: value Context_this + '{ + InjectableEagerPlanter($valueEntity, $encoder, ${ Expr(uuid) }) + } // [T, PrepareRow] // adding these causes assertion failed: unresolved symbols: value Context_this } def applyLazy[T, PrepareRow](valueEntity: Expr[T])(using Quotes, Type[T], Type[PrepareRow]): Expr[T] = { import quotes.reflect._ val uuid = java.util.UUID.randomUUID().toString - '{ LazyPlanter($valueEntity, ${ Expr(uuid) }).unquote } // [T, PrepareRow] // adding these causes assertion failed: unresolved symbols: value Context_this + '{ + LazyPlanter($valueEntity, ${ Expr(uuid) }).unquote + } // [T, PrepareRow] // adding these causes assertion failed: unresolved symbols: value Context_this } } diff --git a/quill-sql/src/main/scala/io/getquill/context/LiftsExtractor.scala b/quill-sql/src/main/scala/io/getquill/context/LiftsExtractor.scala index 04a73277b..232d16057 100644 --- a/quill-sql/src/main/scala/io/getquill/context/LiftsExtractor.scala +++ b/quill-sql/src/main/scala/io/getquill/context/LiftsExtractor.scala @@ -10,16 +10,24 @@ import io.getquill.Planter import io.getquill.ast.Ast object LiftsExtractor { - /** For Dynamic queries, lazy lifts are not allowed. If one is encountered, fail */ + + /** + * For Dynamic queries, lazy lifts are not allowed. If one is encountered, + * fail + */ object Dynamic { def apply[PrepareRowTemp, Session](allLifts: List[Planter[_, _, _]], row: PrepareRowTemp, session: Session) = { val lifts = allLifts.map { case e: EagerPlanter[_, _, _] => e case e: EagerListPlanter[_, _, _] => e case l: LazyPlanter[_, _, _] => - throw new IllegalArgumentException(s"The value ${l.value} has a lazy lift which was spliced into a Dynamic Query. Lazy Lifts are only allowed for Compile-Time queries.") + throw new IllegalArgumentException( + s"The value ${l.value} has a lazy lift which was spliced into a Dynamic Query. Lazy Lifts are only allowed for Compile-Time queries." + ) case other => - throw new IllegalStateException(s"Found an illegal lift planter ${other} during lift extraction. All injectable and lazy lifts must have been resolved at this point.") + throw new IllegalStateException( + s"Found an illegal lift planter ${other} during lift extraction. 
All injectable and lazy lifts must have been resolved at this point." + ) } LiftsExtractor.apply(lifts, row, session) } @@ -28,7 +36,10 @@ object LiftsExtractor { def apply[PrepareRowTemp, Session](lifts: List[Planter[_, _, _]], row: PrepareRowTemp, session: Session) = { def encodeSingleElement(lift: EagerPlanter[_, _, _], idx: Int, row: PrepareRowTemp): (Int, PrepareRowTemp, Any) = { - val prepRow = lift.asInstanceOf[EagerPlanter[Any, PrepareRowTemp, Session]].encoder(idx, lift.value, row, session).asInstanceOf[PrepareRowTemp] + val prepRow = lift + .asInstanceOf[EagerPlanter[Any, PrepareRowTemp, Session]] + .encoder(idx, lift.value, row, session) + .asInstanceOf[PrepareRowTemp] (1, prepRow, lift.value) } @@ -39,7 +50,11 @@ object LiftsExtractor { // since the number of Question marks is already expanded (i.e. from the Unparticular.Query where it's just // one for the IN clause "WHERE p.name IN (?)" to the particular query where it's the number of elements // in the list i.e. "WHERE p.name IN (?, ?)") - def encodeElementList(lift: EagerListPlanter[_, _, _], idx: Int, row: PrepareRowTemp): (Int, PrepareRowTemp, Any) = { + def encodeElementList( + lift: EagerListPlanter[_, _, _], + idx: Int, + row: PrepareRowTemp + ): (Int, PrepareRowTemp, Any) = { val listPlanter = lift.asInstanceOf[EagerListPlanter[Any, PrepareRowTemp, Session]] val prepRow = listPlanter.values.zipWithIndex.foldLeft(row) { case (newRow, (value, listIndex)) => @@ -59,17 +74,18 @@ object LiftsExtractor { // Then: b.encoder(1, b.value [i.e. "bar"], row) -> (0, "bar" :: "foo" :: Nil, row) // etc... val (_, values, prepare) = - lifts.foldLeft((0, List.empty[Any], row)) { - case ((idx, values, row), lift) => - val (increment, newRow, value) = - lift match { - case eager: EagerPlanter[_, _, _] => encodeSingleElement(eager, idx, row) - case eagerList: EagerListPlanter[_, _, _] => encodeElementList(eagerList, idx, row) - case _ => - throw new IllegalArgumentException(s"Lifts must be extracted from EagerLift or EagerList Lift but ${lift} found") - } + lifts.foldLeft((0, List.empty[Any], row)) { case ((idx, values, row), lift) => + val (increment, newRow, value) = + lift match { + case eager: EagerPlanter[_, _, _] => encodeSingleElement(eager, idx, row) + case eagerList: EagerListPlanter[_, _, _] => encodeElementList(eagerList, idx, row) + case _ => + throw new IllegalArgumentException( + s"Lifts must be extracted from EagerLift or EagerList Lift but ${lift} found" + ) + } - (idx + increment, value :: values, newRow) + (idx + increment, value :: values, newRow) } (values, prepare) } diff --git a/quill-sql/src/main/scala/io/getquill/context/Particularize.scala b/quill-sql/src/main/scala/io/getquill/context/Particularize.scala index bce4bbbb7..c85a8bcfe 100644 --- a/quill-sql/src/main/scala/io/getquill/context/Particularize.scala +++ b/quill-sql/src/main/scala/io/getquill/context/Particularize.scala @@ -29,42 +29,56 @@ import io.getquill.util.Interpolator import io.getquill.parser.Lifters /** - * For a query that has a filter(p => liftQuery(List("Joe","Jack")).contains(p.name)) we need to turn - * the "WHERE p.name in (?)" into WHERE p.name in (?, ?) i.e. to "Particularize" the query - * to the number of elements in the query lift. In Scala2-Quill we could just access the values - * of the liftQuery list directly since the lift was an 'Any' value directly in the AST. 
- * In Scala 3 however, we need to treat the lifted list as an Expr and create an Expr[String] - * that represents the Query that is to be during runtime based on the content of the list - * which has to be manipulated inside of a '{ ... } block. + * For a query that has a filter(p => + * liftQuery(List("Joe","Jack")).contains(p.name)) we need to turn the "WHERE + * p.name in (?)" into WHERE p.name in (?, ?) i.e. to "Particularize" the query + * to the number of elements in the query lift. In Scala2-Quill we could just + * access the values of the liftQuery list directly since the lift was an 'Any' + * value directly in the AST. In Scala 3 however, we need to treat the lifted + * list as an Expr and create an Expr[String] that represents the Query that is + * to be built during runtime based on the content of the list which has to be + * manipulated inside of a '{ ... } block. */ object Particularize { // the following should test for that: update - extra lift + scalars + liftQuery/setContains object Static { - /** Convenience constructor for doing particularization from an Unparticular.Query */ + + /** + * Convenience constructor for doing particularization from an + * Unparticular.Query + */ def apply[PrepareRowTemp: Type]( - query: Unparticular.Query, - lifts: List[Expr[Planter[_, _, _]]], - runtimeLiftingPlaceholder: Expr[Int => String], - emptySetContainsToken: Expr[Token => Token], - valuesClauseRepeats: Expr[Int] + query: Unparticular.Query, + lifts: List[Expr[Planter[_, _, _]]], + runtimeLiftingPlaceholder: Expr[Int => String], + emptySetContainsToken: Expr[Token => Token], + valuesClauseRepeats: Expr[Int] )(traceConfig: TraceConfig)(using Quotes): Expr[String] = { import quotes.reflect._ val liftsExpr: Expr[List[Planter[?, ?, ?]]] = Expr.ofList(lifts) - val queryExpr: Expr[Unparticular.Query] = UnparticularQueryLiftable(query) - val traceConfigExpr = TranspileConfigLiftable(traceConfig) - '{ Dynamic[PrepareRowTemp]($queryExpr, $liftsExpr, $runtimeLiftingPlaceholder, $emptySetContainsToken)($traceConfigExpr)._1 } + val queryExpr: Expr[Unparticular.Query] = UnparticularQueryLiftable(query) + val traceConfigExpr = TranspileConfigLiftable(traceConfig) + '{ + Dynamic[PrepareRowTemp]($queryExpr, $liftsExpr, $runtimeLiftingPlaceholder, $emptySetContainsToken)( + $traceConfigExpr + )._1 + } } } // end Static object Dynamic { - /** Convenience constructor for doing particularization from an Unparticular.Query */ + + /** + * Convenience constructor for doing particularization from an + * Unparticular.Query + */ def apply[PrepareRowTemp]( - query: Unparticular.Query, - lifts: List[Planter[_, _, _]], - liftingPlaceholder: Int => String, - emptySetContainsToken: Token => Token, - valuesClauseRepeats: Int = 1 + query: Unparticular.Query, + lifts: List[Planter[_, _, _]], + liftingPlaceholder: Int => String, + emptySetContainsToken: Token => Token, + valuesClauseRepeats: Int = 1 )(traceConfig: TraceConfig): (String, LiftsOrderer) = new Dynamic(traceConfig)(query.realQuery, lifts, liftingPlaceholder, emptySetContainsToken, valuesClauseRepeats) } @@ -73,19 +87,33 @@ object Particularize { val interp = new Interpolator(TraceType.Particularization, traceConfig, 1) import interp._ - def apply(statements: Statement, lifts: List[Planter[_, _, _]], liftingPlaceholder: Int => String, emptySetContainsToken: Token => Token, valuesClauseRepeats: Int): (String, LiftsOrderer) = { + def apply( + statements: Statement, + lifts: List[Planter[_, _, _]], + liftingPlaceholder: Int => String, + emptySetContainsToken: Token 
=> Token, + valuesClauseRepeats: Int + ): (String, LiftsOrderer) = { enum LiftChoice { case ListLift(value: EagerListPlanter[Any, PrepareRowTemp, Session]) case SingleLift(value: Planter[Any, PrepareRowTemp, Session]) case InjectableLift(value: Planter[Any, PrepareRowTemp, Session]) } - val listLifts = lifts.collect { case e: EagerListPlanter[_, _, _] => e.asInstanceOf[EagerListPlanter[Any, PrepareRowTemp, Session]] }.map(lift => (lift.uid, lift)).toMap - val singleLifts = lifts.collect { case e: EagerPlanter[_, _, _] => e.asInstanceOf[EagerPlanter[Any, PrepareRowTemp, Session]] }.map(lift => (lift.uid, lift)).toMap - val injectableLifts = lifts.collect { case e: InjectableEagerPlanter[_, _, _] => e.asInstanceOf[InjectableEagerPlanter[Any, PrepareRowTemp, Session]] }.map(lift => (lift.uid, lift)).toMap + val listLifts = lifts.collect { case e: EagerListPlanter[_, _, _] => + e.asInstanceOf[EagerListPlanter[Any, PrepareRowTemp, Session]] + }.map(lift => (lift.uid, lift)).toMap + val singleLifts = lifts.collect { case e: EagerPlanter[_, _, _] => + e.asInstanceOf[EagerPlanter[Any, PrepareRowTemp, Session]] + }.map(lift => (lift.uid, lift)).toMap + val injectableLifts = lifts.collect { case e: InjectableEagerPlanter[_, _, _] => + e.asInstanceOf[InjectableEagerPlanter[Any, PrepareRowTemp, Session]] + }.map(lift => (lift.uid, lift)).toMap def getLifts(uid: String): LiftChoice = - listLifts.get(uid).map(LiftChoice.ListLift(_)) + listLifts + .get(uid) + .map(LiftChoice.ListLift(_)) .orElse(singleLifts.get(uid).map(LiftChoice.SingleLift(_))) .orElse(injectableLifts.get(uid).map(LiftChoice.InjectableLift(_))) .getOrElse { @@ -116,20 +144,20 @@ object Particularize { } trait Work - case class Item(token: io.getquill.idiom.Token) extends Work - case class SetValueClauseNum(num: Int) extends Work + case class Item(token: io.getquill.idiom.Token) extends Work + case class SetValueClauseNum(num: Int) extends Work case class DoneValueClauseNum(num: Int, isLast: Boolean) extends Work def token2String(token: io.getquill.idiom.Token): (String, LiftsOrderer) = { trace"Tokenization for query: $token".andLog() @tailrec def apply( - workList: Chunk[Work], - sqlResult: Chunk[String], - lifts: Chunk[LiftSlot], - liftsCount: Int, // I.e. the index of the '?' that is inserted in the query (that represents a lift) - valueClausesIndex: Int - ): (String, LiftsOrderer) = { + workList: Chunk[Work], + sqlResult: Chunk[String], + lifts: Chunk[LiftSlot], + liftsCount: Int, // I.e. the index of the '?' 
that is inserted in the query (that represents a lift) + valueClausesIndex: Int + ): (String, LiftsOrderer) = // Completed all work if (workList.isEmpty) { val query = sqlResult.foldLeft("")((concatonation, nextExpr) => concatonation + nextExpr) @@ -157,12 +185,19 @@ object Particularize { LiftSlot.makePlain(tag) } - apply(tail, sqlResult :+ liftPlaceholders, lifts :+ newLift, liftsCount + liftsLength, valueClausesIndex) + apply( + tail, + sqlResult :+ liftPlaceholders, + lifts :+ newLift, + liftsCount + liftsLength, + valueClausesIndex + ) case Item(ValuesClauseToken(stmt)) => val repeatedClauses = - (0 until valuesClauseRepeats) - .toChunk - .mapWithHasNext((i, hasNext) => List(SetValueClauseNum(i), Item(stmt), DoneValueClauseNum(i, !hasNext))) + (0 until valuesClauseRepeats).toChunk + .mapWithHasNext((i, hasNext) => + List(SetValueClauseNum(i), Item(stmt), DoneValueClauseNum(i, !hasNext)) + ) .flatten trace"Instructions for releated clauses: ${repeatedClauses}".andLog() @@ -170,7 +205,9 @@ object Particularize { case Item(Statement(tokens)) => apply(tokens.toChunk.map(Item(_)) ++ tail, sqlResult, lifts, liftsCount, valueClausesIndex) case Item(_: ScalarLiftToken) => - throw new UnsupportedOperationException("Scalar Lift Tokens are not used in Dotty Quill. Only Scalar Lift Tokens.") + throw new UnsupportedOperationException( + "Scalar Lift Tokens are not used in Dotty Quill. Only Scalar Lift Tokens." + ) case Item(_: QuotationTagToken) => throw new UnsupportedOperationException("Quotation Tags must be resolved before a reification.") case SetValueClauseNum(num) => @@ -186,7 +223,6 @@ object Particularize { apply(reaminingWork, sqlResult, lifts, liftsCount, num) } } - } apply(Chunk.single(Item(token)), Chunk.empty, Chunk.empty, 0, 0) } @@ -199,7 +235,7 @@ object Particularize { } private implicit class ChunkExtensions[A](val as: Chunk[A]) extends AnyVal { def mapWithHasNext[B](f: (A, Boolean) => B): Chunk[B] = { - val b = Chunk.newBuilder[B] + val b = Chunk.newBuilder[B] val it = as.iterator if (it.hasNext) { b += f(it.next(), it.hasNext) @@ -215,9 +251,9 @@ object Particularize { object LiftSlot { enum Rank { case Numbered(num: Int) // for values-clauses - case Universal // for regular lifts + case Universal // for regular lifts } - def makePlain(lift: ScalarTag) = LiftSlot(Rank.Universal, lift) + def makePlain(lift: ScalarTag) = LiftSlot(Rank.Universal, lift) def makeNumbered(number: Int, lift: ScalarTag) = LiftSlot(Rank.Numbered(number), lift) object Numbered { def unapply(liftSlot: LiftSlot) = @@ -242,11 +278,8 @@ object Particularize { case class ValueLiftKey(i: Int, uid: String) def orderLifts(valueClauseLifts: List[SingleEntityLifts], regularLifts: List[Planter[?, ?, ?]]) = { val valueClauseLiftIndexes = - valueClauseLifts - .zipWithIndex - .flatMap((entity, i) => - entity.lifts.map(lift => ValueLiftKey(i, lift.uid) -> lift) - ) + valueClauseLifts.zipWithIndex + .flatMap((entity, i) => entity.lifts.map(lift => ValueLiftKey(i, lift.uid) -> lift)) .toMap val regularLiftIndexes = regularLifts.map(lift => (lift.uid, lift)).toMap @@ -257,13 +290,17 @@ object Particularize { valueClauseLiftIndexes .get(ValueLiftKey(valueClauseNum, uid)) .getOrElse { - throw new IllegalStateException(s"Could not find the Value-Clause lift index:${valueClauseNum},uid:${uid}. Existing values are: ${valueClauseLiftIndexes}") + throw new IllegalStateException( + s"Could not find the Value-Clause lift index:${valueClauseNum},uid:${uid}. 
Existing values are: ${valueClauseLiftIndexes}" + ) } case LiftSlot.Plain(uid) => regularLiftIndexes .get(uid) .getOrElse { - throw new IllegalStateException(s"Could not find the lift uid:${uid},uid:${uid}. Existing values are: ${regularLiftIndexes}") + throw new IllegalStateException( + s"Could not find the lift uid:${uid},uid:${uid}. Existing values are: ${regularLiftIndexes}" + ) } case other => throw new IllegalStateException(s"Illegal LiftSlot: ${other}") @@ -272,20 +309,19 @@ object Particularize { } private[getquill] object UnparticularQueryLiftable { - def apply(token: Unparticular.Query)(using Quotes) = liftUnparticularQuery(token) + def apply(token: Unparticular.Query)(using Quotes) = liftUnparticularQuery(token) extension [T](t: T)(using ToExpr[T], Quotes) def expr: Expr[T] = Expr(t) import io.getquill.parser.Lifters.Plain given liftUnparticularQuery: Lifters.Plain[Unparticular.Query] with { - def lift = { - case Unparticular.Query(basicQuery: String, realQuery: Statement) => - '{ Unparticular.Query(${ basicQuery.expr }, ${ StatementLiftable(realQuery) }) } + def lift = { case Unparticular.Query(basicQuery: String, realQuery: Statement) => + '{ Unparticular.Query(${ basicQuery.expr }, ${ StatementLiftable(realQuery) }) } } } } // end UnparticularQueryLiftable private[getquill] object StatementLiftable { - def apply(token: Statement)(using Quotes) = liftStatement(token) + def apply(token: Statement)(using Quotes) = liftStatement(token) extension [T](t: T)(using ToExpr[T], Quotes) def expr: Expr[T] = Expr(t) import io.getquill.parser.Lifters.Plain @@ -294,20 +330,24 @@ object Particularize { // Note strange errors about SerializeHelper.fromSerialized types can happen here if NotSerializing is not true. // Anyway we do not want tag-serialization here for the sake of simplicity for the tokenization which happens at runtime. // AST serialization is generally used to make unlifting deeply nested ASTs simpler but Quotation/Scalar Tags are only 1-level deep. - case ScalarTagToken(lift: ScalarTag) => '{ io.getquill.idiom.ScalarTagToken(${ Lifter.NotSerializing.scalarTag(lift) }) } - case QuotationTagToken(lift: QuotationTag) => '{ io.getquill.idiom.QuotationTagToken(${ Lifter.NotSerializing.quotationTag(lift) }) } - case StringToken(string) => '{ io.getquill.idiom.StringToken(${ string.expr }) } - case s: Statement => liftStatement(s) - case SetContainsToken(a, op, b) => '{ io.getquill.idiom.SetContainsToken(${ a.expr }, ${ op.expr }, ${ b.expr }) } - case ScalarLiftToken(lift) => quotes.reflect.report.throwError("Scalar Lift Tokens are not used in Dotty Quill. Only Scalar Lift Tokens.") - case ValuesClauseToken(stmt) => '{ io.getquill.idiom.ValuesClauseToken(${ stmt.expr }) } + case ScalarTagToken(lift: ScalarTag) => + '{ io.getquill.idiom.ScalarTagToken(${ Lifter.NotSerializing.scalarTag(lift) }) } + case QuotationTagToken(lift: QuotationTag) => + '{ io.getquill.idiom.QuotationTagToken(${ Lifter.NotSerializing.quotationTag(lift) }) } + case StringToken(string) => '{ io.getquill.idiom.StringToken(${ string.expr }) } + case s: Statement => liftStatement(s) + case SetContainsToken(a, op, b) => + '{ io.getquill.idiom.SetContainsToken(${ a.expr }, ${ op.expr }, ${ b.expr }) } + case ScalarLiftToken(lift) => + quotes.reflect.report.throwError("Scalar Lift Tokens are not used in Dotty Quill. 
Only Scalar Lift Tokens.") + case ValuesClauseToken(stmt) => '{ io.getquill.idiom.ValuesClauseToken(${ stmt.expr }) } } } given liftStatement: Lifters.Plain[Statement] with { - def lift = { - case Statement(tokens) => '{ io.getquill.idiom.Statement(${ tokens.expr }) } + def lift = { case Statement(tokens) => + '{ io.getquill.idiom.Statement(${ tokens.expr }) } } } } // end StatementLiftable -} // end Particularize +} // end Particularize diff --git a/quill-sql/src/main/scala/io/getquill/context/ProtoContextSecundus.scala b/quill-sql/src/main/scala/io/getquill/context/ProtoContextSecundus.scala index c2aa5dec4..c519cda74 100644 --- a/quill-sql/src/main/scala/io/getquill/context/ProtoContextSecundus.scala +++ b/quill-sql/src/main/scala/io/getquill/context/ProtoContextSecundus.scala @@ -7,15 +7,17 @@ import io.getquill.ast.Ast import io.getquill.quat.Quat /** - * A common context used between Quill and ProtoQuill. This is more like a pre-context because the actual `run` - * methods cannot be contained here since they use macros. Right now not all Scala2-Quill context extend - * this context but hopefully they will all in the future. This will establish a common general-api that - * Quill contexts can use. - * In ProtoQuill, this context is used for the base of all other context and allows the Scala 3 macros - * to call the `execute___` methods. In Scala2-Quill wherein macros are less strict about signatures, - * this cannot be used for `Context` (in `Context.scala`) but various higher-level context extend it - * as a guard-rail against API drift i.e. so that the Scala2-Quill and ProtoQuill internal-context - * APIs remain largely the same. + * A common context used between Quill and ProtoQuill. This is more like a + * pre-context because the actual `run` methods cannot be contained here since + * they use macros. Right now not all Scala2-Quill context extend this context + * but hopefully they will all in the future. This will establish a common + * general-api that Quill contexts can use. In ProtoQuill, this context is used + * for the base of all other context and allows the Scala 3 macros to call the + * `execute___` methods. In Scala2-Quill wherein macros are less strict about + * signatures, this cannot be used for `Context` (in `Context.scala`) but + * various higher-level context extend it as a guard-rail against API drift i.e. + * so that the Scala2-Quill and ProtoQuill internal-context APIs remain largely + * the same. 
*/ trait ProtoContextSecundus[+Dialect <: io.getquill.idiom.Idiom, +Naming <: NamingStrategy] extends RowContext { type PrepareRow @@ -36,29 +38,53 @@ trait ProtoContextSecundus[+Dialect <: io.getquill.idiom.Idiom, +Naming <: Namin def idiom: Dialect def naming: Naming - def executeQuery[T](sql: String, prepare: Prepare, extractor: Extractor[T])(executionInfo: ExecutionInfo, rn: Runner): Result[RunQueryResult[T]] - def executeQuerySingle[T](string: String, prepare: Prepare, extractor: Extractor[T])(executionInfo: ExecutionInfo, rn: Runner): Result[RunQuerySingleResult[T]] + def executeQuery[T](sql: String, prepare: Prepare, extractor: Extractor[T])( + executionInfo: ExecutionInfo, + rn: Runner + ): Result[RunQueryResult[T]] + def executeQuerySingle[T](string: String, prepare: Prepare, extractor: Extractor[T])( + executionInfo: ExecutionInfo, + rn: Runner + ): Result[RunQuerySingleResult[T]] def executeAction(sql: String, prepare: Prepare)(executionInfo: ExecutionInfo, rn: Runner): Result[RunActionResult] - def executeActionReturning[T](sql: String, prepare: Prepare, extractor: Extractor[T], returningBehavior: ReturnAction)(executionInfo: ExecutionInfo, rn: Runner): Result[RunActionReturningResult[T]] - def executeActionReturningMany[T](sql: String, prepare: Prepare, extractor: Extractor[T], returningBehavior: ReturnAction)(executionInfo: ExecutionInfo, rn: Runner): Result[RunActionReturningResult[List[T]]] - def executeBatchAction(groups: List[BatchGroup])(executionInfo: ExecutionInfo, rn: Runner): Result[RunBatchActionResult] - def executeBatchActionReturning[T](groups: List[BatchGroupReturning], extractor: Extractor[T])(executionInfo: ExecutionInfo, rn: Runner): Result[RunBatchActionReturningResult[T]] + def executeActionReturning[T]( + sql: String, + prepare: Prepare, + extractor: Extractor[T], + returningBehavior: ReturnAction + )(executionInfo: ExecutionInfo, rn: Runner): Result[RunActionReturningResult[T]] + def executeActionReturningMany[T]( + sql: String, + prepare: Prepare, + extractor: Extractor[T], + returningBehavior: ReturnAction + )(executionInfo: ExecutionInfo, rn: Runner): Result[RunActionReturningResult[List[T]]] + def executeBatchAction( + groups: List[BatchGroup] + )(executionInfo: ExecutionInfo, rn: Runner): Result[RunBatchActionResult] + def executeBatchActionReturning[T](groups: List[BatchGroupReturning], extractor: Extractor[T])( + executionInfo: ExecutionInfo, + rn: Runner + ): Result[RunBatchActionReturningResult[T]] } /** - * Metadata related to query execution. Note that AST should be lazy so as not to be evaluated - * at runtime (which would happen with a by-value property since `{ ExecutionInfo(stuff, ast) } is spliced - * into a query-execution site). Additionally, there are performance overheads even splicing the finalized - * version of the AST into call sites of the `run` functions. For this reason, this functionality - * is being used only in ProtoQuill and only when a trait extends the trait AstSplicing. - * In the future it might potentially be controlled by a compiler argument. + * Metadata related to query execution. Note that AST should be lazy so as not + * to be evaluated at runtime (which would happen with a by-value property since + * `{ ExecutionInfo(stuff, ast) } is spliced into a query-execution site). + * Additionally, there are performance overheads even splicing the finalized + * version of the AST into call sites of the `run` functions. 
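+ * (A context can opt into splicing by mixing in the marker trait, e.g. a
+ * sketch: {{ class MyContext extends MyBaseContext with AstSplicing }}, where
+ * `MyBaseContext` is hypothetical.)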
For this reason, + * this functionality is being used only in ProtoQuill and only when a trait + * extends the trait AstSplicing. In the future it might potentially be + * controlled by a compiler argument. */ class ExecutionInfo(val executionType: ExecutionType, queryAst: => Ast, queryTopLevelQuat: => Quat) { - def ast: Ast = queryAst + def ast: Ast = queryAst def topLevelQuat: Quat = queryTopLevelQuat } object ExecutionInfo { - def apply(executionType: ExecutionType, ast: => Ast, topLevelQuat: => Quat) = new ExecutionInfo(executionType, ast, topLevelQuat) + def apply(executionType: ExecutionType, ast: => Ast, topLevelQuat: => Quat) = + new ExecutionInfo(executionType, ast, topLevelQuat) val unknown = ExecutionInfo(ExecutionType.Unknown, io.getquill.ast.NullValue, Quat.Unknown) } @@ -67,7 +93,7 @@ trait AstSplicing sealed trait ExecutionType object ExecutionType { case object Dynamic extends ExecutionType - case object Static extends ExecutionType + case object Static extends ExecutionType case object Unknown extends ExecutionType } @@ -79,5 +105,8 @@ trait ProtoStreamContext[+Dialect <: io.getquill.idiom.Idiom, +Naming <: NamingS type StreamResult[T] type Session - def streamQuery[T](fetchSize: Option[Int], sql: String, prepare: Prepare, extractor: Extractor[T])(info: ExecutionInfo, rn: Runner): StreamResult[T] + def streamQuery[T](fetchSize: Option[Int], sql: String, prepare: Prepare, extractor: Extractor[T])( + info: ExecutionInfo, + rn: Runner + ): StreamResult[T] } diff --git a/quill-sql/src/main/scala/io/getquill/context/QueryExecution.scala b/quill-sql/src/main/scala/io/getquill/context/QueryExecution.scala index fd06cfec5..8e7cc9e8e 100644 --- a/quill-sql/src/main/scala/io/getquill/context/QueryExecution.scala +++ b/quill-sql/src/main/scala/io/getquill/context/QueryExecution.scala @@ -8,7 +8,7 @@ import scala.language.experimental.macros import java.io.Closeable import scala.compiletime.summonFrom import scala.util.Try -import io.getquill.{ReturnAction} +import io.getquill.ReturnAction import io.getquill.generic.EncodingDsl import io.getquill.Quoted import io.getquill.QueryMeta @@ -54,26 +54,43 @@ import io.getquill.util.Messages.TraceType import io.getquill.util.Format object ContextOperation { - case class SingleArgument[I, T, A <: QAC[I, _] with Action[I], D <: Idiom, N <: NamingStrategy, PrepareRow, ResultRow, Session, Ctx <: Context[_, _], Res]( - sql: String, - prepare: (PrepareRow, Session) => (List[Any], PrepareRow), - extractor: Extraction[ResultRow, Session, T], - executionInfo: ExecutionInfo, - fetchSize: Option[Int] + case class SingleArgument[I, T, A <: QAC[I, _] with Action[ + I + ], D <: Idiom, N <: NamingStrategy, PrepareRow, ResultRow, Session, Ctx <: Context[_, _], Res]( + sql: String, + prepare: (PrepareRow, Session) => (List[Any], PrepareRow), + extractor: Extraction[ResultRow, Session, T], + executionInfo: ExecutionInfo, + fetchSize: Option[Int] ) - case class BatchArgument[I, T, A <: QAC[I, _] with Action[I], D <: Idiom, N <: NamingStrategy, PrepareRow, ResultRow, Session, Ctx <: Context[_, _], Res]( - groups: List[(String, List[(PrepareRow, Session) => (List[Any], PrepareRow)])], - extractor: Extraction[ResultRow, Session, T], - executionInfo: ExecutionInfo, - fetchSize: Option[Int] + case class BatchArgument[I, T, A <: QAC[I, _] with Action[ + I + ], D <: Idiom, N <: NamingStrategy, PrepareRow, ResultRow, Session, Ctx <: Context[_, _], Res]( + groups: List[(String, List[(PrepareRow, Session) => (List[Any], PrepareRow)])], + extractor: Extraction[ResultRow, 
Session, T], + executionInfo: ExecutionInfo, + fetchSize: Option[Int] ) - case class Single[I, T, A <: QAC[I, _] with Action[I], D <: Idiom, N <: NamingStrategy, PrepareRow, ResultRow, Session, Ctx <: Context[_, _], Res](val idiom: D, val naming: N)( - val execute: (ContextOperation.SingleArgument[I, T, A, D, N, PrepareRow, ResultRow, Session, Ctx, Res]) => Res + case class Single[I, T, A <: QAC[I, _] with Action[ + I + ], D <: Idiom, N <: NamingStrategy, PrepareRow, ResultRow, Session, Ctx <: Context[_, _], Res]( + val idiom: D, + val naming: N + )( + val execute: (ContextOperation.SingleArgument[I, T, A, D, N, PrepareRow, ResultRow, Session, Ctx, Res]) => Res ) - case class Batch[I, T, A <: QAC[I, _] with Action[I], D <: Idiom, N <: NamingStrategy, PrepareRow, ResultRow, Session, Ctx <: Context[_, _], Res](val idiom: D, val naming: N)( - val execute: (ContextOperation.BatchArgument[I, T, A, D, N, PrepareRow, ResultRow, Session, Ctx, Res]) => Res + case class Batch[I, T, A <: QAC[I, _] with Action[ + I + ], D <: Idiom, N <: NamingStrategy, PrepareRow, ResultRow, Session, Ctx <: Context[_, _], Res]( + val idiom: D, + val naming: N + )( + val execute: (ContextOperation.BatchArgument[I, T, A, D, N, PrepareRow, ResultRow, Session, Ctx, Res]) => Res ) - case class Factory[D <: Idiom, N <: NamingStrategy, PrepareRow, ResultRow, Session, Ctx <: Context[_, _]](val idiom: D, val naming: N) { + case class Factory[D <: Idiom, N <: NamingStrategy, PrepareRow, ResultRow, Session, Ctx <: Context[_, _]]( + val idiom: D, + val naming: N + ) { def op[I, T, Res] = ContextOperation.Single[I, T, Nothing, D, N, PrepareRow, ResultRow, Session, Ctx, Res](idiom, naming) def batch[I, T, A <: QAC[I, T] with Action[I], Res] = @@ -108,7 +125,9 @@ object Execution { def identityConverter[T: Type](using Quotes) = '{ (t: T) => t } /** Summon decoder for a given Type and Row type (ResultRow) */ - def summonDecoderOrThrow[ResultRow: Type, Session: Type, DecoderT: Type]()(using Quotes): Expr[GenericDecoder[ResultRow, Session, DecoderT, DecodingType]] = { + def summonDecoderOrThrow[ResultRow: Type, Session: Type, DecoderT: Type]()(using + Quotes + ): Expr[GenericDecoder[ResultRow, Session, DecoderT, DecodingType]] = { import quotes.reflect.{Try => _, _} // First try summoning a specific encoder, if that doesn't work, use the generic one. // Note that we could do Expr.summon[GenericDecoder[..., DecodingType.Generic]] to summon it @@ -133,7 +152,8 @@ object Execution { } } - def makeDecoder[ResultRow: Type, Session: Type, RawT: Type](using Quotes)() = summonDecoderOrThrow[ResultRow, Session, RawT]() + def makeDecoder[ResultRow: Type, Session: Type, RawT: Type](using Quotes)() = + summonDecoderOrThrow[ResultRow, Session, RawT]() class MakeExtractor[ResultRow: Type, Session: Type, T: Type, RawT: Type] { def makeExtractorFrom(contramap: Expr[RawT => T])(using Quotes) = { @@ -141,7 +161,9 @@ object Execution { '{ (r: ResultRow, s: Session) => $contramap.apply(${ decoder }.apply(0, r, s)) } } - def static(state: StaticState, converter: Expr[RawT => T], extract: ExtractBehavior)(using Quotes): Expr[io.getquill.context.Extraction[ResultRow, Session, T]] = + def static(state: StaticState, converter: Expr[RawT => T], extract: ExtractBehavior)(using + Quotes + ): Expr[io.getquill.context.Extraction[ResultRow, Session, T]] = extract match { // TODO Allow passing in a starting index here? 
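        // For orientation, the three behaviors below map onto the three Extraction shapes:
        //   Extract                 -> Extraction.Simple(extractor)          e.g. a plain SELECT
        //   ExtractWithReturnAction -> Extraction.Returning(extractor, ...)  e.g. INSERT ... RETURNING
        //   Skip                    -> Extraction.None                       e.g. an action with no decoded result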
case ExtractBehavior.Extract => @@ -149,13 +171,17 @@ object Execution { '{ Extraction.Simple($extractor) } case ExtractBehavior.ExtractWithReturnAction => val extractor = makeExtractorFrom(converter) - val returnAction = state.returnAction.getOrElse { throw new IllegalArgumentException(s"Return action could not be found in the Query: ${query}") } + val returnAction = state.returnAction.getOrElse { + throw new IllegalArgumentException(s"Return action could not be found in the Query: ${query}") + } '{ Extraction.Returning($extractor, ${ io.getquill.parser.Lifter.returnAction(returnAction) }) } case ExtractBehavior.Skip => '{ Extraction.None } } - def dynamic(converter: Expr[RawT => T], extract: ExtractBehavior)(using Quotes): Expr[io.getquill.context.Extraction[ResultRow, Session, T]] = + def dynamic(converter: Expr[RawT => T], extract: ExtractBehavior)(using + Quotes + ): Expr[io.getquill.context.Extraction[ResultRow, Session, T]] = extract match { case ExtractBehavior.Extract => val extractor = makeExtractorFrom(converter) @@ -179,29 +205,29 @@ object Execution { object QueryExecution { class RunQuery[ - I: Type, - // Output type of the Quoted. E.g. People for query[People] or List[People] for query[People].returningMany(p => p) - // Also when a QueryMeta[OutputT, RawT] is used (e.g. `QueryMeta[PersonName, String]: queryMeta(Query[PersonName] => String)(String => PersonName)`) - // then OutputT is the output type that gets converted out from RawT. - T: Type, - ResultRow: Type, - PrepareRow: Type, - Session: Type, - D <: Idiom: Type, - N <: NamingStrategy: Type, - Ctx <: Context[_, _]: Type, - Res: Type + I: Type, + // Output type of the Quoted. E.g. People for query[People] or List[People] for query[People].returningMany(p => p) + // Also when a QueryMeta[OutputT, RawT] is used (e.g. `QueryMeta[PersonName, String]: queryMeta(Query[PersonName] => String)(String => PersonName)`) + // then OutputT is the output type that gets converted out from RawT. + T: Type, + ResultRow: Type, + PrepareRow: Type, + Session: Type, + D <: Idiom: Type, + N <: NamingStrategy: Type, + Ctx <: Context[_, _]: Type, + Res: Type ]( - quotedOp: Expr[Quoted[QAC[_, _]]], - contextOperation: Expr[ContextOperation.Single[I, T, Nothing, D, N, PrepareRow, ResultRow, Session, Ctx, Res]], - fetchSize: Expr[Option[Int]], - wrap: Expr[OuterSelectWrap] + quotedOp: Expr[Quoted[QAC[_, _]]], + contextOperation: Expr[ContextOperation.Single[I, T, Nothing, D, N, PrepareRow, ResultRow, Session, Ctx, Res]], + fetchSize: Expr[Option[Int]], + wrap: Expr[OuterSelectWrap] )(using val qctx: Quotes, QAC: Type[QAC[_, _]]) { import qctx.reflect.{Try => _, _} import Execution._ val transpileConfig = SummonTranspileConfig() - val interp = new Interpolator(TraceType.Execution, transpileConfig.traceConfig, 1) + val interp = new Interpolator(TraceType.Execution, transpileConfig.traceConfig, 1) import interp._ def apply() = @@ -235,12 +261,12 @@ object QueryExecution { } /** - * Summon all needed components and run executeQuery method - * (Experiment with catching `StaticTranslationMacro.apply` errors since they usually happen - * because some upstream construct has done a reportError so we do not want to do another one. - * I.e. if we do another returnError here it will override that one which is not needed. - * if this seems to work well, make the same change to other apply___ methods here. 
- * ) + * Summon all needed components and run executeQuery method (Experiment with + * catching `StaticTranslationMacro.apply` errors since they usually happen + * because some upstream construct has done a reportError so we do not want + * to do another one. I.e. if we do another returnError here it will + * override that one which is not needed. if this seems to work well, make + * the same change to other apply___ methods here. ) */ def applyQuery(quoted: Expr[Quoted[QAC[_, _]]]): Expr[Res] = { val topLevelQuat = QuatMaking.ofType[T] @@ -260,9 +286,20 @@ object QueryExecution { report.throwError(msg) // Otherwise the regular pipeline case scala.util.Success(Some(staticState)) => - executeStatic[T](staticState, identityConverter, ExtractBehavior.Extract, topLevelQuat) // Yes we can, do it! + executeStatic[T]( + staticState, + identityConverter, + ExtractBehavior.Extract, + topLevelQuat + ) // Yes we can, do it! case scala.util.Success(None) => - executeDynamic(quoted, identityConverter, ExtractBehavior.Extract, queryElaborationBehavior, topLevelQuat) // No we can't. Do dynamic + executeDynamic( + quoted, + identityConverter, + ExtractBehavior.Extract, + queryElaborationBehavior, + topLevelQuat + ) // No we can't. Do dynamic } } } @@ -281,14 +318,24 @@ object QueryExecution { case Some(staticState) => executeStatic[T](staticState, identityConverter, ExtractBehavior.ExtractWithReturnAction, topLevelQuat) case None => - executeDynamic(quoted, identityConverter, ExtractBehavior.ExtractWithReturnAction, ElaborationBehavior.Skip, Quat.Value) + executeDynamic( + quoted, + identityConverter, + ExtractBehavior.ExtractWithReturnAction, + ElaborationBehavior.Skip, + Quat.Value + ) } } - /** Run a query with a given QueryMeta given by the output type RawT and the conversion RawT back to OutputT */ + /** + * Run a query with a given QueryMeta given by the output type RawT and the + * conversion RawT back to OutputT + */ def runWithQueryMeta[RawT: Type](quoted: Expr[Quoted[QAC[_, _]]]): Expr[Res] = { val topLevelQuat = QuatMaking.ofType[RawT] - val (queryRawT, converter, staticStateOpt) = QueryMetaExtractor.applyImpl[T, RawT, D, N](quoted.asExprOf[Quoted[Query[T]]], topLevelQuat) + val (queryRawT, converter, staticStateOpt) = + QueryMetaExtractor.applyImpl[T, RawT, D, N](quoted.asExprOf[Quoted[Query[T]]], topLevelQuat) staticStateOpt match { case Some(staticState) => executeStatic[RawT](staticState, converter, ExtractBehavior.Extract, topLevelQuat) @@ -296,7 +343,13 @@ object QueryExecution { // Note: Can assume QuotationType is `Query` here since summonly a Query-meta is only allowed for Queries // Also: A previous implementation of this used QAC[I, T] => QAC[I, RawT] directly but was scrapped due to some Dotty issues // that later got fixed. If this implementation becomes cumbersome we can try that. - executeDynamic[RawT](queryRawT.asExprOf[Quoted[QAC[I, RawT]]], converter, ExtractBehavior.Extract, queryElaborationBehavior, topLevelQuat) + executeDynamic[RawT]( + queryRawT.asExprOf[Quoted[QAC[I, RawT]]], + converter, + ExtractBehavior.Extract, + queryElaborationBehavior, + topLevelQuat + ) } } @@ -318,43 +371,72 @@ object QueryExecution { } case other => report.throwError(s"""| - |Invalid planter found during lazy lift resolution: - |${io.getquill.util.Format.Expr(other)} - |All injectable planters should already have been elaborated into separate components. 
+             |Invalid planter found during lazy lift resolution:
+             |${io.getquill.util.Format.Expr(other)}
+             |All injectable planters should already have been elaborated into separate components.
              """.stripMargin)
      }
    }

    /**
-     * Execute static query via ctx.executeQuery method given we have the ability to do so
-     * i.e. have a staticState
+     * Execute static query via ctx.executeQuery method given we have the
+     * ability to do so, i.e. we have a staticState
     */
-    def executeStatic[RawT: Type](state: StaticState, converter: Expr[RawT => T], extract: ExtractBehavior, topLevelQuat: Quat): Expr[Res] = {
+    def executeStatic[RawT: Type](
+      state: StaticState,
+      converter: Expr[RawT => T],
+      extract: ExtractBehavior,
+      topLevelQuat: Quat
+    ): Expr[Res] = {
      val lifts = resolveLazyLiftsStatic(state.lifts)
      trace"Original Lifts (including lazy): ${state.lifts.map(_.show)} resolved to: ${lifts.map(_.show)}".andLog()
      // Create the row-preparer to prepare the SQL Query object (e.g. PreparedStatement)
      // and the extractor to read out the results (e.g. ResultSet)
-      val prepare = '{ (row: PrepareRow, session: Session) => LiftsExtractor.apply[PrepareRow, Session](${ Expr.ofList(lifts) }, row, session) }
+      val prepare = '{ (row: PrepareRow, session: Session) =>
+        LiftsExtractor.apply[PrepareRow, Session](${ Expr.ofList(lifts) }, row, session)
+      }
      val extractor = MakeExtractor[ResultRow, Session, T, RawT].static(state, converter, extract)
      val emptyContainsTokenExpr: Expr[Token => Token] = '{ $contextOperation.idiom.emptySetContainsToken(_) }
-      val liftingPlaceholderExpr: Expr[Int => String] = '{ $contextOperation.idiom.liftingPlaceholder }
-      val particularQuery = Particularize.Static(state.query, lifts, liftingPlaceholderExpr, emptyContainsTokenExpr, '{ 1 })(transpileConfig.traceConfig)
+      val liftingPlaceholderExpr: Expr[Int => String] = '{ $contextOperation.idiom.liftingPlaceholder }
+      val particularQuery =
+        Particularize.Static(state.query, lifts, liftingPlaceholderExpr, emptyContainsTokenExpr, '{ 1 })(
+          transpileConfig.traceConfig
+        )
      // Plug in the components and execute
      val astSplice =
        if (TypeRepr.of[Ctx] <:< TypeRepr.of[AstSplicing]) Lifter(state.ast)
        else '{ io.getquill.ast.NullValue }
-      '{ $contextOperation.execute(ContextOperation.SingleArgument($particularQuery, $prepare, $extractor, ExecutionInfo(ExecutionType.Static, $astSplice, ${ Lifter.quat(topLevelQuat) }), $fetchSize)) }
+      '{
+        $contextOperation.execute(
+          ContextOperation.SingleArgument(
+            $particularQuery,
+            $prepare,
+            $extractor,
+            ExecutionInfo(ExecutionType.Static, $astSplice, ${ Lifter.quat(topLevelQuat) }),
+            $fetchSize
+          )
+        )
+      }
    } // end executeStatic

    /**
-     * Expand dynamic-queries i.e. queries whose query-string cannot be computed at compile-time.
-     * Note that for now, QuotationType is only needed for dynamic queries (which is only needed to know whether you
-     * need to use ElaborateStructure or not. This is decided in the StaticTranslationMacro for static queries using a
-     * different method. I.e. since StaticTranslationMacro knows the AST node it infers Action/Query from that).
+     * Expand dynamic queries, i.e. queries whose query-string cannot be computed
+     * at compile-time. Note that for now, QuotationType is only needed for
+     * dynamic queries (which is only needed to know whether you need to use
+     * ElaborateStructure or not. This is decided in the StaticTranslationMacro
+     * for static queries using a different method. I.e. since
+     * StaticTranslationMacro knows the AST node it infers Action/Query from
+     * that).
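+     * A sketch of a query that must take this path (hypothetical `Person` table):
+     * {{ val q = if (adultsOnly) quote(query[Person].filter(p => p.age >= 18)) else quote(query[Person]) }}
+     * Here the SQL string cannot be computed until `adultsOnly` is known at runtime.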
*/ - def executeDynamic[RawT: Type](quote: Expr[Quoted[QAC[?, ?]]], converter: Expr[RawT => T], extract: ExtractBehavior, elaborationBehavior: ElaborationBehavior, topLevelQuat: Quat) = { + def executeDynamic[RawT: Type]( + quote: Expr[Quoted[QAC[?, ?]]], + converter: Expr[RawT => T], + extract: ExtractBehavior, + elaborationBehavior: ElaborationBehavior, + topLevelQuat: Quat + ) = { // Grab the ast from the quote and make that into an expression that we will pass into the dynamic evaluator // Expand the outermost quote using the macro and put it back into the quote // Is the expansion on T or RawT, need to investigate @@ -367,7 +449,8 @@ object QueryExecution { // re-run all the terms in `ast` which in the dynamic-api case could re-generate UIDs which does cause consistencies // because the UIDs would be replaced in the AST and overridden to values that are not the same as the EagerPlanters. val elaboratedAstQuote = '{ $quote.asInstanceOf[io.getquill.Quoted[io.getquill.QAC[I, RawT]]] } - val extractor: Expr[io.getquill.context.Extraction[ResultRow, Session, T]] = MakeExtractor[ResultRow, Session, T, RawT].dynamic(converter, extract) + val extractor: Expr[io.getquill.context.Extraction[ResultRow, Session, T]] = + MakeExtractor[ResultRow, Session, T, RawT].dynamic(converter, extract) // TODO What about when an extractor is not neededX val spliceAsts = TypeRepr.of[Ctx] <:< TypeRepr.of[AstSplicing] @@ -390,36 +473,41 @@ object QueryExecution { } // end RunQuery inline def apply[ - I, - T, - DecodeT, - ResultRow, - PrepareRow, - Session, - D <: Idiom, - N <: NamingStrategy, - Ctx <: Context[_, _], - Res - ](ctx: ContextOperation.Single[I, T, Nothing, D, N, PrepareRow, ResultRow, Session, Ctx, Res])(inline quotedOp: Quoted[QAC[_, _]], fetchSize: Option[Int], inline wrap: OuterSelectWrap = OuterSelectWrap.Default) = + I, + T, + DecodeT, + ResultRow, + PrepareRow, + Session, + D <: Idiom, + N <: NamingStrategy, + Ctx <: Context[_, _], + Res + ](ctx: ContextOperation.Single[I, T, Nothing, D, N, PrepareRow, ResultRow, Session, Ctx, Res])( + inline quotedOp: Quoted[QAC[_, _]], + fetchSize: Option[Int], + inline wrap: OuterSelectWrap = OuterSelectWrap.Default + ) = ${ applyImpl('quotedOp, 'ctx, 'fetchSize, 'wrap) } def applyImpl[ - I: Type, - T: Type, - DecodeT: Type, - ResultRow: Type, - PrepareRow: Type, - Session: Type, - D <: Idiom: Type, - N <: NamingStrategy: Type, - Ctx <: Context[_, _]: Type, - Res: Type + I: Type, + T: Type, + DecodeT: Type, + ResultRow: Type, + PrepareRow: Type, + Session: Type, + D <: Idiom: Type, + N <: NamingStrategy: Type, + Ctx <: Context[_, _]: Type, + Res: Type ]( - quotedOp: Expr[Quoted[QAC[_, _]]], - ctx: Expr[ContextOperation.Single[I, T, Nothing, D, N, PrepareRow, ResultRow, Session, Ctx, Res]], - fetchSize: Expr[Option[Int]], - wrap: Expr[OuterSelectWrap] - )(using qctx: Quotes): Expr[Res] = new RunQuery[I, T, ResultRow, PrepareRow, Session, D, N, Ctx, Res](quotedOp, ctx, fetchSize, wrap).apply() + quotedOp: Expr[Quoted[QAC[_, _]]], + ctx: Expr[ContextOperation.Single[I, T, Nothing, D, N, PrepareRow, ResultRow, Session, Ctx, Res]], + fetchSize: Expr[Option[Int]], + wrap: Expr[OuterSelectWrap] + )(using qctx: Quotes): Expr[Res] = + new RunQuery[I, T, ResultRow, PrepareRow, Session, D, N, Ctx, Res](quotedOp, ctx, fetchSize, wrap).apply() } // end QueryExecution @@ -431,27 +519,27 @@ object PrepareDynamicExecution { import io.getquill.context.Execution.ElaborationBehavior def apply[ - I, - T, - RawT, - D <: Idiom, - N <: NamingStrategy, - PrepareRow, - ResultRow, - 
Session + I, + T, + RawT, + D <: Idiom, + N <: NamingStrategy, + PrepareRow, + ResultRow, + Session ]( - quoted: Quoted[QAC[I, RawT]], - rawExtractor: Extraction[ResultRow, Session, T], - idiom: D, - naming: N, - elaborationBehavior: ElaborationBehavior, - topLevelQuat: Quat, - transpileConfig: TranspileConfig, - spliceBehavior: SpliceBehavior = SpliceBehavior.NeedsSplice, - // For a batch query, these are the other lifts besides the primary liftQuery lifts. - // This should be empty & ignored for all other query types. - additionalLifts: List[Planter[?, ?, ?]] = List(), - batchAlias: Option[String] = None + quoted: Quoted[QAC[I, RawT]], + rawExtractor: Extraction[ResultRow, Session, T], + idiom: D, + naming: N, + elaborationBehavior: ElaborationBehavior, + topLevelQuat: Quat, + transpileConfig: TranspileConfig, + spliceBehavior: SpliceBehavior = SpliceBehavior.NeedsSplice, + // For a batch query, these are the other lifts besides the primary liftQuery lifts. + // This should be empty & ignored for all other query types. + additionalLifts: List[Planter[?, ?, ?]] = List(), + batchAlias: Option[String] = None ) = { // Splice all quotation values back into the AST recursively, by this point these quotations are dynamic // which means that the compiler has not done the splicing for us. We need to do this ourselves. @@ -462,8 +550,12 @@ object PrepareDynamicExecution { // to do a beta-reduction first. val (splicedAstRaw, gatheredLifts) = spliceBehavior match { - case SpliceBehavior.NeedsSplice => (spliceQuotations(quoted), gatherLifts(quoted)) - case SpliceBehavior.AlreadySpliced => (quoted.ast, quoted.lifts) // If already spliced, can skip all runtimeQuotes clauses since their asts have already been spliced, same with lifts + case SpliceBehavior.NeedsSplice => (spliceQuotations(quoted), gatherLifts(quoted)) + case SpliceBehavior.AlreadySpliced => + ( + quoted.ast, + quoted.lifts + ) // If already spliced, can skip all runtimeQuotes clauses since their asts have already been spliced, same with lifts } VerifyFreeVariables.runtime(splicedAstRaw) @@ -474,9 +566,10 @@ object PrepareDynamicExecution { // println("=============== Dynamic Expanded Ast Is ===========\n" + io.getquill.util.Messages.qprint(splicedAst)) // Tokenize the spliced AST - val queryType = IdiomContext.QueryType.discoverFromAst(splicedAst, batchAlias) + val queryType = IdiomContext.QueryType.discoverFromAst(splicedAst, batchAlias) val idiomContext = IdiomContext(transpileConfig, queryType) - val (outputAst, stmt, _) = idiom.translate(splicedAst, topLevelQuat, ExecutionType.Dynamic, idiomContext)(using naming) + val (outputAst, stmt, _) = + idiom.translate(splicedAst, topLevelQuat, ExecutionType.Dynamic, idiomContext)(using naming) val naiveQury = Unparticular.translateNaive(stmt, idiom.liftingPlaceholder) val liftColumns = @@ -539,7 +632,7 @@ object PrepareDynamicExecution { def spliceQuotations(quoted: Quoted[_]): Ast = { def spliceQuotationsRecurse(quoted: Quoted[_]): Ast = { val quotationVases = quoted.runtimeQuotes - val ast = quoted.ast + val ast = quoted.ast // Get all the quotation tags Transform(ast) { // Splice the corresponding vase for every tag, then recurse @@ -566,9 +659,9 @@ object PrepareDynamicExecution { } private[getquill] def processLifts( - lifts: List[Planter[_, _, _]], - matchingExternals: List[External], - secondaryLifts: List[Planter[_, _, _]] = List() + lifts: List[Planter[_, _, _]], + matchingExternals: List[External], + secondaryLifts: List[Planter[_, _, _]] = List() ): Either[String, (List[Planter[_, 
_, _]], List[Planter[_, _, _]])] = { val encodeablesMap = lifts.map(e => (e.uid, e)).toMap @@ -577,8 +670,8 @@ object PrepareDynamicExecution { secondaryLifts.map(e => (e.uid, e)).toMap val uidsOfScalarTags = - matchingExternals.collect { - case tag: ScalarTag => tag.uid + matchingExternals.collect { case tag: ScalarTag => + tag.uid } enum UidStatus { @@ -596,23 +689,22 @@ object PrepareDynamicExecution { } val sortedEncodeables = - uidsOfScalarTags - .map { uid => - encodeablesMap.get(uid) match { - case Some(element) => UidStatus.Primary(uid, element) - case None => - secondaryEncodeablesMap.get(uid) match { - case Some(element) => UidStatus.Secondary(uid, element) - case None => UidStatus.NotFound(uid) - } - } + uidsOfScalarTags.map { uid => + encodeablesMap.get(uid) match { + case Some(element) => UidStatus.Primary(uid, element) + case None => + secondaryEncodeablesMap.get(uid) match { + case Some(element) => UidStatus.Secondary(uid, element) + case None => UidStatus.NotFound(uid) + } } + } object HasNotFoundUids { def unapply(statuses: List[UidStatus]) = { val collected = - statuses.collect { - case UidStatus.NotFound(uid) => uid + statuses.collect { case UidStatus.NotFound(uid) => + uid } if (collected.nonEmpty) Some(collected) else None } @@ -625,7 +717,7 @@ object PrepareDynamicExecution { case UidStatus.Primary(_, _) => true case _ => false } - val primariesFound = primaries.collect { case p: UidStatus.Primary => p } + val primariesFound = primaries.collect { case p: UidStatus.Primary => p } val secondariesFound = secondaries.collect { case s: UidStatus.Secondary => s } val goodPartitioning = primariesFound.length == primaries.length && secondariesFound.length == secondaries.length @@ -660,8 +752,8 @@ object PrepareDynamicExecution { } // end PrepareDynamicExecution /** - * Drives dynamic execution from the Context - * Note that AST is already elaborated by the time it comes into here + * Drives dynamic execution from the Context Note that AST is already elaborated + * by the time it comes into here */ object RunDynamicExecution { @@ -672,41 +764,63 @@ object RunDynamicExecution { import io.getquill.context.Execution.ElaborationBehavior def apply[ - I, - T, - RawT, - D <: Idiom, - N <: NamingStrategy, - PrepareRow, - ResultRow, - Session, - Ctx <: Context[_, _], - Res + I, + T, + RawT, + D <: Idiom, + N <: NamingStrategy, + PrepareRow, + ResultRow, + Session, + Ctx <: Context[_, _], + Res ]( - quoted: Quoted[QAC[I, RawT]], - ctx: ContextOperation.Single[I, T, Nothing, D, N, PrepareRow, ResultRow, Session, Ctx, Res], - rawExtractor: Extraction[ResultRow, Session, T], - spliceAst: Boolean, - fetchSize: Option[Int], - elaborationBehavior: ElaborationBehavior, - topLevelQuat: Quat, - transpileConfig: TranspileConfig + quoted: Quoted[QAC[I, RawT]], + ctx: ContextOperation.Single[I, T, Nothing, D, N, PrepareRow, ResultRow, Session, Ctx, Res], + rawExtractor: Extraction[ResultRow, Session, T], + spliceAst: Boolean, + fetchSize: Option[Int], + elaborationBehavior: ElaborationBehavior, + topLevelQuat: Quat, + transpileConfig: TranspileConfig ): Res = { // println("===== Passed Ast: " + io.getquill.util.Messages.qprint(quoted.ast)) val (stmt, outputAst, sortedLifts, extractor, sortedSecondaryLifts) = - PrepareDynamicExecution[I, T, RawT, D, N, PrepareRow, ResultRow, Session](quoted, rawExtractor, ctx.idiom, ctx.naming, elaborationBehavior, topLevelQuat, transpileConfig) + PrepareDynamicExecution[I, T, RawT, D, N, PrepareRow, ResultRow, Session]( + quoted, + rawExtractor, + ctx.idiom, + 
ctx.naming, + elaborationBehavior, + topLevelQuat, + transpileConfig + ) // Turn the Tokenized AST into an actual string and pull out the ScalarTags (i.e. the lifts) val (unparticularQuery, _) = Unparticular.Query.fromStatement(stmt, ctx.idiom.liftingPlaceholder) // TODO don't really need lift-sorting in PrepareDynamicExecution anymore? Could use liftsOrderer to do that - val (queryString, _) = Particularize.Dynamic(unparticularQuery, sortedLifts ++ sortedSecondaryLifts, ctx.idiom.liftingPlaceholder, ctx.idiom.emptySetContainsToken)(transpileConfig.traceConfig) + val (queryString, _) = Particularize.Dynamic( + unparticularQuery, + sortedLifts ++ sortedSecondaryLifts, + ctx.idiom.liftingPlaceholder, + ctx.idiom.emptySetContainsToken + )(transpileConfig.traceConfig) // Use the sortedLifts to prepare the method that will prepare the SQL statement - val prepare = (row: PrepareRow, session: Session) => LiftsExtractor.Dynamic[PrepareRow, Session](sortedLifts, row, session) + val prepare = (row: PrepareRow, session: Session) => + LiftsExtractor.Dynamic[PrepareRow, Session](sortedLifts, row, session) // Exclute the SQL Statement val executionAst = if (spliceAst) outputAst else io.getquill.ast.NullValue - ctx.execute(ContextOperation.SingleArgument(queryString, prepare, extractor, ExecutionInfo(ExecutionType.Dynamic, executionAst, topLevelQuat), fetchSize)) + ctx.execute( + ContextOperation.SingleArgument( + queryString, + prepare, + extractor, + ExecutionInfo(ExecutionType.Dynamic, executionAst, topLevelQuat), + fetchSize + ) + ) } } // end RunDynamicExecution diff --git a/quill-sql/src/main/scala/io/getquill/context/QueryExecutionBatch.scala b/quill-sql/src/main/scala/io/getquill/context/QueryExecutionBatch.scala index cedca1195..76d562c03 100644 --- a/quill-sql/src/main/scala/io/getquill/context/QueryExecutionBatch.scala +++ b/quill-sql/src/main/scala/io/getquill/context/QueryExecutionBatch.scala @@ -5,7 +5,7 @@ import scala.language.experimental.macros import java.io.Closeable import scala.compiletime.summonFrom import scala.util.Try -import io.getquill.{ReturnAction} +import io.getquill.ReturnAction import io.getquill.generic.EncodingDsl import io.getquill.Quoted import io.getquill.QueryMeta @@ -72,17 +72,14 @@ private[getquill] enum BatchActionType { } /** - * In some cases the action that goes inside the batch needs an infix. For example, for SQL server - * to be able to do batch inserts of rows with IDs you need to do something like: - * {{ - * liftQuery(products).foreach(p => - * sql"SET IDENTITY_INSERT Product ON; ${query[Product].insertValue(p)}".as[Insert[Int]]) - * }} - * In order to yield something like this: - * {{ - * SET IDENTITY_INSERT Product ON; INSERT INTO Product (id,description,sku) VALUES (?, ?, ?) - * }} - * Otherwise SQLServer will not let you insert the row because `IDENTITY_INSERT` will be off. + * In some cases the action that goes inside the batch needs an infix. For + * example, for SQL server to be able to do batch inserts of rows with IDs you + * need to do something like: {{ liftQuery(products).foreach(p => sql"SET + * IDENTITY_INSERT Product ON; + * ${query[Product].insertValue(p)}".as[Insert[Int]]) }} In order to yield + * something like this: {{ SET IDENTITY_INSERT Product ON; INSERT INTO Product + * (id,description,sku) VALUES (?, ?, ?) }} Otherwise SQLServer will not let you + * insert the row because `IDENTITY_INSERT` will be off. 
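+ * A fuller sketch with hypothetical rows (assumes a `Product(id, description, sku)`
+ * case class and a SQL Server context `ctx` in scope):
+ * {{
+ *   val products = List(Product(1, "Wand", "W-1"), Product(2, "Hat", "H-2"))
+ *   ctx.run(liftQuery(products).foreach(p =>
+ *     sql"SET IDENTITY_INSERT Product ON; ${query[Product].insertValue(p)}".as[Insert[Int]]))
+ * }}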
*/ object PossiblyInfixAction { private def isTailAction(actionAst: Ast) = @@ -112,7 +109,12 @@ object PrepareBatchComponents { import Execution._ import QueryExecutionBatchModel._ - def apply[I, PrepareRow](unliftedAst: Ast, foreachIdentAst: ast.Ast, extractionBehavior: BatchExtractBehavior, traceConfig: TraceConfig): Either[String, (Ident, Ast, BatchActionType)] = { + def apply[I, PrepareRow]( + unliftedAst: Ast, + foreachIdentAst: ast.Ast, + extractionBehavior: BatchExtractBehavior, + traceConfig: TraceConfig + ): Either[String, (Ident, Ast, BatchActionType)] = { // putting this in a block since I don't want to externally import these packages import io.getquill.ast._ val componentsOrError = @@ -122,7 +124,9 @@ object PrepareBatchComponents { case Foreach(_, foreachIdent, actionQueryAst @ ActionEntity(bType)) => Right(foreachIdent, actionQueryAst, bType) case other => - Left(s"Malformed batch entity: ${io.getquill.util.Messages.qprint(other)}. Batch insertion entities must have the form Insert(Entity, Nil: List[Assignment])") + Left( + s"Malformed batch entity: ${io.getquill.util.Messages.qprint(other)}. Batch insertion entities must have the form Insert(Entity, Nil: List[Assignment])" + ) } case ExtractBehavior.ExtractWithReturnAction => @@ -137,7 +141,9 @@ object PrepareBatchComponents { case _: ReturningGenerated => Right(foreachIdent, actionQueryAst, bType) } case other => - Left(s"Malformed batch entity: ${other}. Batch insertion entities must have the form Returning/ReturningGenerated(Insert(Entity, Nil: List[Assignment]), _, _)") + Left( + s"Malformed batch entity: ${other}. Batch insertion entities must have the form Returning/ReturningGenerated(Insert(Entity, Nil: List[Assignment]), _, _)" + ) } } @@ -214,17 +220,21 @@ object QueryExecutionBatch { import QueryExecutionBatchModel.{_, given} private[getquill] class RunQuery[ - I: Type, - T: Type, - A <: QAC[I, T] & Action[I]: Type, - ResultRow: Type, - PrepareRow: Type, - Session: Type, - D <: Idiom: Type, - N <: NamingStrategy: Type, - Ctx <: Context[_, _], - Res: Type - ](quotedRaw: Expr[Quoted[BatchAction[A]]], batchContextOperation: Expr[ContextOperation.Batch[I, T, A, D, N, PrepareRow, ResultRow, Session, Ctx, Res]], rowsPerQuery: Expr[Int])(using Quotes, Type[Ctx]) { + I: Type, + T: Type, + A <: QAC[I, T] & Action[I]: Type, + ResultRow: Type, + PrepareRow: Type, + Session: Type, + D <: Idiom: Type, + N <: NamingStrategy: Type, + Ctx <: Context[_, _], + Res: Type + ]( + quotedRaw: Expr[Quoted[BatchAction[A]]], + batchContextOperation: Expr[ContextOperation.Batch[I, T, A, D, N, PrepareRow, ResultRow, Session, Ctx, Res]], + rowsPerQuery: Expr[Int] + )(using Quotes, Type[Ctx]) { import quotes.reflect._ val topLevelQuat = QuatMaking.ofType[T] @@ -252,12 +262,19 @@ object QueryExecutionBatch { } /** - * (TODO need to fix querySchema with batch usage i.e. liftQuery(people).insert(p => querySchema[Person](...).insertValue(p)) - * Create a quotation with the elaborated entity - * e.g. given liftQuery(people).foreach(p => query[Person].insert[Person](p)) - * then create a liftQuery(people).foreach(p => query[Person].insert[Person](_.name -> lift(p.name), _.age -> lift(p.age))) + * (TODO need to fix querySchema with batch usage i.e. + * liftQuery(people).insert(p => querySchema[Person](...).insertValue(p)) + * Create a quotation with the elaborated entity e.g. 
given + * liftQuery(people).foreach(p => query[Person].insert[Person](p)) then + * create a liftQuery(people).foreach(p => + * query[Person].insert[Person](_.name -> lift(p.name), _.age -> + * lift(p.age))) */ - def expandQuotation(actionQueryAstExpr: Expr[Ast], batchActionType: BatchActionType, perRowLifts: Expr[List[InjectableEagerPlanter[_, PrepareRow, Session]]]) = + def expandQuotation( + actionQueryAstExpr: Expr[Ast], + batchActionType: BatchActionType, + perRowLifts: Expr[List[InjectableEagerPlanter[_, PrepareRow, Session]]] + ) = batchActionType match { case BatchActionType.Insert => '{ Quoted[Insert[I]]($actionQueryAstExpr, ${ perRowLifts }, Nil) } case BatchActionType.Update => '{ Quoted[Update[I]]($actionQueryAstExpr, ${ perRowLifts }, Nil) } @@ -269,13 +286,14 @@ object QueryExecutionBatch { /** * ********************************************************************************************************* - * ************************************** Prepare Dynamic Batch Query ************************************** + * ************************************** Prepare Dynamic Batch Query + * ************************************** * ********************************************************************************************************* */ def applyDynamic(): Expr[Res] = { val extractionBehaviorExpr = Expr(extractionBehavior) - val extractor = MakeExtractor[ResultRow, Session, T, T].dynamic(identityConverter, extractionBehavior) - val transpileConfig = SummonTranspileConfig() + val extractor = MakeExtractor[ResultRow, Session, T, T].dynamic(identityConverter, extractionBehavior) + val transpileConfig = SummonTranspileConfig() '{ QueryExecutionBatchDynamic.apply[I, T, A, ResultRow, PrepareRow, Session, D, N, Ctx, Res]( $quotedRaw, @@ -299,11 +317,14 @@ object QueryExecutionBatch { def apply(): Expr[Res] = UntypeExpr(quoted) match { case QuotedExpr.UprootableWithLifts(QuotedExpr(quoteAst, _, _), planters) => - val unliftedAst = Unlifter(quoteAst) - val comps = BatchStatic[I, PrepareRow, Session](unliftedAst, planters, extractionBehavior) + val unliftedAst = Unlifter(quoteAst) + val comps = BatchStatic[I, PrepareRow, Session](unliftedAst, planters, extractionBehavior) val expandedQuotation = expandQuotation(comps.actionQueryAst, comps.batchActionType, comps.perRowLifts) - def expandLiftQueryMembers(filteredPerRowLifts: List[InjectableEagerPlanterExpr[?, ?, ?]], entities: Expr[Iterable[?]]) = + def expandLiftQueryMembers( + filteredPerRowLifts: List[InjectableEagerPlanterExpr[?, ?, ?]], + entities: Expr[Iterable[?]] + ) = '{ $entities.map(entity => ${ @@ -317,7 +338,7 @@ object QueryExecutionBatch { // we need a pre-filtered, and ordered list of lifts. The StaticTranslationMacro interanally has done that so we can take the lifts from there although they need to be casted. 
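                // (The cast in question is List[PlanterExpr] -> List[InjectableEagerPlanterExpr]; per row,
                //  lift.inject('entity) then specializes each injectable lift to that entity, roughly
                //  turning a field-getter like ((p: Person) => p.name) into lift(entity.name).)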
// This is safe because they are just the lifts taht we have already had from the `injectableLifts` list // TODO If all the lists are not InjectableEagerPlanterExpr, then we need to find out which ones are not and not inject them - val injectedLifts = filteredPerRowLifts.map(lift => lift.inject('entity)) + val injectedLifts = filteredPerRowLifts.map(lift => lift.inject('entity)) val injectedLiftsExpr = Expr.ofList(injectedLifts) // val prepare = '{ (row: PrepareRow, session: Session) => LiftsExtractor.apply[PrepareRow, Session]($injectedLiftsExpr, row, session) } // prepare @@ -326,11 +347,18 @@ object QueryExecutionBatch { ) } - StaticTranslationMacro[D, N](expandedQuotation, ElaborationBehavior.Skip, topLevelQuat, comps.categorizedPlanters.map(_.planter), Some(comps.foreachIdent)) match { + StaticTranslationMacro[D, N]( + expandedQuotation, + ElaborationBehavior.Skip, + topLevelQuat, + comps.categorizedPlanters.map(_.planter), + Some(comps.foreachIdent) + ) match { case Some(state @ StaticState(query, filteredPerRowLiftsRaw, _, _, secondaryLifts)) => // create an extractor for returning actions val filteredPerRowLifts = filteredPerRowLiftsRaw.asInstanceOf[List[InjectableEagerPlanterExpr[_, _, _]]] - val extractor = MakeExtractor[ResultRow, Session, T, T].static(state, identityConverter, extractionBehavior) + val extractor = + MakeExtractor[ResultRow, Session, T, T].static(state, identityConverter, extractionBehavior) // In an expression we could have a whole bunch of different lifts // liftQuery([Person1, Person2 <- these are EagerEntitiesPlanterExpr]) @@ -377,11 +405,13 @@ object QueryExecutionBatch { val allPlanterExprs = (filteredPerRowLifts ++ secondaryLifts).map(_.plant) val originalPlantersExpr = Expr.ofList(filteredPerRowLifts.map(_.plant)) - val emptyContainsTokenExpr: Expr[Token => Token] = '{ $batchContextOperation.idiom.emptySetContainsToken(_) } + val emptyContainsTokenExpr: Expr[Token => Token] = '{ + $batchContextOperation.idiom.emptySetContainsToken(_) + } val liftingPlaceholderExpr: Expr[Int => String] = '{ $batchContextOperation.idiom.liftingPlaceholder } - val queryExpr = Particularize.UnparticularQueryLiftable(state.query) - val traceConfig = SummonTranspileConfig().traceConfig - val traceConfigExpr = TranspileConfigLiftable(traceConfig) + val queryExpr = Particularize.UnparticularQueryLiftable(state.query) + val traceConfig = SummonTranspileConfig().traceConfig + val traceConfigExpr = TranspileConfigLiftable(traceConfig) import QueryExecutionBatchModel.{_, given} val extractionBehaviorExpr = Expr(extractionBehavior) @@ -401,7 +431,14 @@ object QueryExecutionBatch { } '{ - $batchContextOperation.execute(ContextOperation.BatchArgument($batchGroups, $extractor, ExecutionInfo(ExecutionType.Static, ${ Lifter(state.ast) }, ${ Lifter.quat(topLevelQuat) }), None)) + $batchContextOperation.execute( + ContextOperation.BatchArgument( + $batchGroups, + $extractor, + ExecutionInfo(ExecutionType.Static, ${ Lifter(state.ast) }, ${ Lifter.quat(topLevelQuat) }), + None + ) + ) } case None => @@ -420,59 +457,68 @@ object QueryExecutionBatch { /** * ******************************************************************************************************** - * ************************************** Prepare Static Batch Query ************************************** + * ************************************** Prepare Static Batch Query + * ************************************** * ******************************************************************************************************** */ inline 
def apply[ - I, - T, - A <: QAC[I, T] with Action[I], - ResultRow, - PrepareRow, - Session, - D <: Idiom, - N <: NamingStrategy, - Ctx <: Context[_, _], - Res - ](ctx: ContextOperation.Batch[I, T, A, D, N, PrepareRow, ResultRow, Session, Ctx, Res], rowsPerQuery: Int)(inline quoted: Quoted[BatchAction[A]]) = + I, + T, + A <: QAC[I, T] with Action[I], + ResultRow, + PrepareRow, + Session, + D <: Idiom, + N <: NamingStrategy, + Ctx <: Context[_, _], + Res + ](ctx: ContextOperation.Batch[I, T, A, D, N, PrepareRow, ResultRow, Session, Ctx, Res], rowsPerQuery: Int)( + inline quoted: Quoted[BatchAction[A]] + ) = ${ applyImpl[I, T, A, ResultRow, PrepareRow, Session, D, N, Ctx, Res]('quoted, 'ctx, 'rowsPerQuery) } def applyImpl[ - I: Type, - T: Type, - A <: QAC[I, T] with Action[I]: Type, - ResultRow: Type, - PrepareRow: Type, - Session: Type, - D <: Idiom: Type, - N <: NamingStrategy: Type, - Ctx <: Context[_, _], - Res: Type - ](quoted: Expr[Quoted[BatchAction[A]]], ctx: Expr[ContextOperation.Batch[I, T, A, D, N, PrepareRow, ResultRow, Session, Ctx, Res]], rowsPerQuery: Expr[Int])(using Quotes, Type[Ctx]): Expr[Res] = + I: Type, + T: Type, + A <: QAC[I, T] with Action[I]: Type, + ResultRow: Type, + PrepareRow: Type, + Session: Type, + D <: Idiom: Type, + N <: NamingStrategy: Type, + Ctx <: Context[_, _], + Res: Type + ]( + quoted: Expr[Quoted[BatchAction[A]]], + ctx: Expr[ContextOperation.Batch[I, T, A, D, N, PrepareRow, ResultRow, Session, Ctx, Res]], + rowsPerQuery: Expr[Int] + )(using Quotes, Type[Ctx]): Expr[Res] = new RunQuery[I, T, A, ResultRow, PrepareRow, Session, D, N, Ctx, Res](quoted, ctx, rowsPerQuery).apply() } // end QueryExecutionBatch object BatchStatic { case class Components[PrepareRow, Session]( - actionQueryAst: Expr[Ast], - batchActionType: BatchActionType, - perRowLifts: Expr[List[InjectableEagerPlanter[?, PrepareRow, Session]]], - categorizedPlanters: List[PlanterKind.Other], - primaryPlanter: PlanterKind.PrimaryEntitiesList | PlanterKind.PrimaryScalarList, - foreachIdent: Ident + actionQueryAst: Expr[Ast], + batchActionType: BatchActionType, + perRowLifts: Expr[List[InjectableEagerPlanter[?, PrepareRow, Session]]], + categorizedPlanters: List[PlanterKind.Other], + primaryPlanter: PlanterKind.PrimaryEntitiesList | PlanterKind.PrimaryScalarList, + foreachIdent: Ident ) sealed trait PlanterKind object PlanterKind { case class PrimaryEntitiesList(planter: EagerEntitiesPlanterExpr[?, ?, ?]) extends PlanterKind - case class PrimaryScalarList(planter: EagerListPlanterExpr[?, ?, ?]) extends PlanterKind - case class Other(planter: PlanterExpr[?, ?, ?]) extends PlanterKind + case class PrimaryScalarList(planter: EagerListPlanterExpr[?, ?, ?]) extends PlanterKind + case class Other(planter: PlanterExpr[?, ?, ?]) extends PlanterKind } def organizePlanters(planters: List[PlanterExpr[?, ?, ?]])(using Quotes) = { import quotes.reflect._ - planters.foldLeft((Option.empty[PlanterKind.PrimaryEntitiesList | PlanterKind.PrimaryScalarList], List.empty[PlanterKind.Other])) { + planters.foldLeft( + (Option.empty[PlanterKind.PrimaryEntitiesList | PlanterKind.PrimaryScalarList], List.empty[PlanterKind.Other]) + ) { case ((None, list), planter: EagerEntitiesPlanterExpr[?, ?, ?]) => val planterKind = PlanterKind.PrimaryEntitiesList(planter) (Some(planterKind), list) @@ -486,21 +532,31 @@ object BatchStatic { report.throwError("Invalid planter traversal") } match { case (Some(primary), categorizedPlanters) => (primary, categorizedPlanters) - case (None, _) => report.throwError(s"Could not find an 
entities list-lift (i.e. liftQuery(entities/scalars) in liftQuery(...).foreach()) in lifts: ${planters.map(p => Format.Expr(p.plant))}") + case (None, _) => + report.throwError( + s"Could not find an entities list-lift (i.e. liftQuery(entities/scalars) in liftQuery(...).foreach()) in lifts: ${planters + .map(p => Format.Expr(p.plant))}" + ) } } def extractPrimaryComponents[I: Type, PrepareRow: Type, Session: Type]( - primaryPlanter: PlanterKind.PrimaryEntitiesList | PlanterKind.PrimaryScalarList, - ast: Ast, - extractionBehavior: QueryExecutionBatchModel.BatchExtractBehavior, - traceConfig: TraceConfig + primaryPlanter: PlanterKind.PrimaryEntitiesList | PlanterKind.PrimaryScalarList, + ast: Ast, + extractionBehavior: QueryExecutionBatchModel.BatchExtractBehavior, + traceConfig: TraceConfig )(using Quotes) = primaryPlanter match { // In the case of liftQuery(entities) case PlanterKind.PrimaryEntitiesList(planter) => - val (foreachIdent, actionQueryAst, batchActionType) = PrepareBatchComponents[I, PrepareRow](ast, planter.fieldClass, extractionBehavior, traceConfig).rightOrThrow() - (foreachIdent, Lifter(actionQueryAst), batchActionType, planter.fieldGetters.asInstanceOf[Expr[List[InjectableEagerPlanter[?, PrepareRow, Session]]]]) + val (foreachIdent, actionQueryAst, batchActionType) = + PrepareBatchComponents[I, PrepareRow](ast, planter.fieldClass, extractionBehavior, traceConfig).rightOrThrow() + ( + foreachIdent, + Lifter(actionQueryAst), + batchActionType, + planter.fieldGetters.asInstanceOf[Expr[List[InjectableEagerPlanter[?, PrepareRow, Session]]]] + ) // In the case of liftQuery(scalars) // Note, we could have potential other liftQuery(scalars) later in the query for example: // liftQuery(List("Joe","Jack","Jill")).foreach(query[Person].filter(name => liftQuery(1,2,3 /*ids of Joe,Jack,Jill respectively*/).contains(p.id)).update(_.name -> name)) @@ -510,16 +566,33 @@ object BatchStatic { case '[tt] => val uuid = java.util.UUID.randomUUID.toString val (foreachReplacementAst, perRowLift) = - (ScalarTag(uuid, Source.Parser), '{ InjectableEagerPlanter((t: tt) => t, ${ planter.encoder.asInstanceOf[Expr[io.getquill.generic.GenericEncoder[tt, PrepareRow, Session]]] }, ${ Expr(uuid) }) }) + ( + ScalarTag(uuid, Source.Parser), + '{ + InjectableEagerPlanter( + (t: tt) => t, + ${ + planter.encoder.asInstanceOf[Expr[io.getquill.generic.GenericEncoder[tt, PrepareRow, Session]]] + }, + ${ Expr(uuid) } + ) + } + ) // create the full batch-query Ast using the value of actual query of the batch statement i.e. 
I in: // liftQuery[...](...).foreach(p => query[I].insertValue(p)) - val (foreachIdent, actionQueryAst, batchActionType) = PrepareBatchComponents[I, PrepareRow](ast, foreachReplacementAst, extractionBehavior, traceConfig).rightOrThrow() + val (foreachIdent, actionQueryAst, batchActionType) = + PrepareBatchComponents[I, PrepareRow](ast, foreachReplacementAst, extractionBehavior, traceConfig) + .rightOrThrow() // return the combined batch components (foreachIdent, Lifter(actionQueryAst), batchActionType, Expr.ofList(List(perRowLift))) } } - def apply[I: Type, PrepareRow: Type, Session: Type](ast: Ast, planters: List[PlanterExpr[?, ?, ?]], extractionBehavior: QueryExecutionBatchModel.BatchExtractBehavior)(using Quotes) = { + def apply[I: Type, PrepareRow: Type, Session: Type]( + ast: Ast, + planters: List[PlanterExpr[?, ?, ?]], + extractionBehavior: QueryExecutionBatchModel.BatchExtractBehavior + )(using Quotes) = { import quotes.reflect._ // Given: Person(name, age) @@ -544,9 +617,21 @@ object BatchStatic { // ast = lift(UUID1) // I.e. ScalarTag(UUID1) since lift in the AST means a ScalarTag // lifts = List(InjectableEagerLift(p, UUID1)) // TODO check that there are no EagerEntitiesPlanterExpr other than in the primary planter - val (foreachIdent, actionQueryAst, batchActionType, perRowLifts) = extractPrimaryComponents[I, PrepareRow, Session](primaryPlanter, ast, extractionBehavior, SummonTranspileConfig().traceConfig) - - Components[PrepareRow, Session](actionQueryAst, batchActionType, perRowLifts, categorizedPlanters, primaryPlanter, foreachIdent) + val (foreachIdent, actionQueryAst, batchActionType, perRowLifts) = extractPrimaryComponents[I, PrepareRow, Session]( + primaryPlanter, + ast, + extractionBehavior, + SummonTranspileConfig().traceConfig + ) + + Components[PrepareRow, Session]( + actionQueryAst, + batchActionType, + perRowLifts, + categorizedPlanters, + primaryPlanter, + foreachIdent + ) } // end apply extension [T](element: Either[String, T])(using Quotes) { diff --git a/quill-sql/src/main/scala/io/getquill/context/QueryExecutionBatchDynamic.scala b/quill-sql/src/main/scala/io/getquill/context/QueryExecutionBatchDynamic.scala index 0d1a35b3b..da1336e58 100644 --- a/quill-sql/src/main/scala/io/getquill/context/QueryExecutionBatchDynamic.scala +++ b/quill-sql/src/main/scala/io/getquill/context/QueryExecutionBatchDynamic.scala @@ -5,7 +5,7 @@ import scala.language.experimental.macros import java.io.Closeable import scala.compiletime.summonFrom import scala.util.Try -import io.getquill.{ReturnAction} +import io.getquill.ReturnAction import io.getquill.generic.EncodingDsl import io.getquill.Quoted import io.getquill.QueryMeta @@ -67,29 +67,29 @@ object QueryExecutionBatchDynamic { import PrepareDynamicExecution._ def apply[ - I, - T, - A <: QAC[I, T] & Action[I], - ResultRow, - PrepareRow, - Session, - D <: Idiom, - N <: NamingStrategy, - Ctx <: Context[_, _], - Res + I, + T, + A <: QAC[I, T] & Action[I], + ResultRow, + PrepareRow, + Session, + D <: Idiom, + N <: NamingStrategy, + Ctx <: Context[_, _], + Res ]( - quotedRaw: Quoted[BatchAction[A]], - batchContextOperation: ContextOperation.Batch[I, T, A, D, N, PrepareRow, ResultRow, Session, Ctx, Res], - extractionBehavior: BatchExtractBehavior, - rawExtractor: Extraction[ResultRow, Session, T], - topLevelQuat: Quat, - transpileConfig: TranspileConfig, - batchingBehavior: BatchingBehavior + quotedRaw: Quoted[BatchAction[A]], + batchContextOperation: ContextOperation.Batch[I, T, A, D, N, PrepareRow, ResultRow, Session, Ctx, Res], + 
extractionBehavior: BatchExtractBehavior, + rawExtractor: Extraction[ResultRow, Session, T], + topLevelQuat: Quat, + transpileConfig: TranspileConfig, + batchingBehavior: BatchingBehavior ) = { // since real quotation could possibly be nested, need to get all splice all quotes and get all lifts in all runtimeQuote sections first - val ast = spliceQuotations(quotedRaw) - val lifts = gatherLifts(quotedRaw) - val idiom = batchContextOperation.idiom + val ast = spliceQuotations(quotedRaw) + val lifts = gatherLifts(quotedRaw) + val idiom = batchContextOperation.idiom val naming = batchContextOperation.naming // println(s"===== Spliced Ast: ====\n${io.getquill.util.Messages.qprint(ast)}") @@ -117,12 +117,22 @@ object QueryExecutionBatchDynamic { // ast = lift(UUID1) // I.e. ScalarTag(UUID1) since lift in the AST means a ScalarTag // lifts = List(InjectableEagerLift(p, UUID1)) val (foreachIdent, actionQueryAst, batchActionType, perRowLifts) = - extractPrimaryComponents[I, PrepareRow, Session](primaryPlanter, ast, extractionBehavior, transpileConfig.traceConfig) + extractPrimaryComponents[I, PrepareRow, Session]( + primaryPlanter, + ast, + extractionBehavior, + transpileConfig.traceConfig + ) // equivalent to static expandQuotation result val dynamicExpandedQuotation = batchActionType match { - case BatchActionType.Insert => Quoted[Insert[I]](actionQueryAst, perRowLifts, Nil) // Already gathered queries and lifts from sub-clauses, don't need them anymore + case BatchActionType.Insert => + Quoted[Insert[I]]( + actionQueryAst, + perRowLifts, + Nil + ) // Already gathered queries and lifts from sub-clauses, don't need them anymore case BatchActionType.Update => Quoted[Update[I]](actionQueryAst, perRowLifts, Nil) // We need lifts for 'Delete' because it could have a WHERE clause case BatchActionType.Delete => Quoted[Delete[I]](actionQueryAst, perRowLifts, Nil) @@ -173,9 +183,16 @@ object QueryExecutionBatchDynamic { extractionBehavior )(transpileConfig.traceConfig) - val spliceAst = false + val spliceAst = false val executionAst = if (spliceAst) outputAst else io.getquill.ast.NullValue - batchContextOperation.execute(ContextOperation.BatchArgument(batchGroups, extractor, ExecutionInfo(ExecutionType.Dynamic, executionAst, topLevelQuat), None)) + batchContextOperation.execute( + ContextOperation.BatchArgument( + batchGroups, + extractor, + ExecutionInfo(ExecutionType.Dynamic, executionAst, topLevelQuat), + None + ) + ) } extension [T](element: Either[String, T]) { @@ -191,12 +208,14 @@ object QueryExecutionBatchDynamic { sealed trait PlanterKind object PlanterKind { case class PrimaryEntitiesList(planter: EagerEntitiesPlanter[?, ?, ?]) extends PlanterKind - case class PrimaryScalarList(planter: EagerListPlanter[?, ?, ?]) extends PlanterKind - case class Other(planter: Planter[?, ?, ?]) extends PlanterKind + case class PrimaryScalarList(planter: EagerListPlanter[?, ?, ?]) extends PlanterKind + case class Other(planter: Planter[?, ?, ?]) extends PlanterKind } def organizePlanters(planters: List[Planter[?, ?, ?]]) = - planters.foldLeft((Option.empty[PlanterKind.PrimaryEntitiesList | PlanterKind.PrimaryScalarList], List.empty[PlanterKind.Other])) { + planters.foldLeft( + (Option.empty[PlanterKind.PrimaryEntitiesList | PlanterKind.PrimaryScalarList], List.empty[PlanterKind.Other]) + ) { case ((None, list), planter: EagerEntitiesPlanter[?, ?, ?]) => val planterKind = PlanterKind.PrimaryEntitiesList(planter) (Some(planterKind), list) @@ -210,20 +229,30 @@ object QueryExecutionBatchDynamic { throw new 
IllegalArgumentException("Invalid planter traversal") } match { case (Some(primary), categorizedPlanters) => (primary, categorizedPlanters) - case (None, _) => throw new IllegalArgumentException(s"Could not find an entities list-lift (i.e. liftQuery(entities/scalars) in liftQuery(...).foreach()) in lifts: ${planters}") + case (None, _) => + throw new IllegalArgumentException( + s"Could not find an entities list-lift (i.e. liftQuery(entities/scalars) in liftQuery(...).foreach()) in lifts: ${planters}" + ) } def extractPrimaryComponents[I, PrepareRow, Session]( - primaryPlanter: PlanterKind.PrimaryEntitiesList | PlanterKind.PrimaryScalarList, - ast: Ast, - extractionBehavior: QueryExecutionBatchModel.BatchExtractBehavior, - traceConfig: TraceConfig + primaryPlanter: PlanterKind.PrimaryEntitiesList | PlanterKind.PrimaryScalarList, + ast: Ast, + extractionBehavior: QueryExecutionBatchModel.BatchExtractBehavior, + traceConfig: TraceConfig ): (Ident, Ast, BatchActionType, List[InjectableEagerPlanter[?, PrepareRow, Session]]) = primaryPlanter match { // In the case of liftQuery(entities) case PlanterKind.PrimaryEntitiesList(planter) => - val (foreachIdent, actionQueryAst, batchActionType) = PrepareBatchComponents[I, PrepareRow](ast, planter.fieldClass, extractionBehavior, traceConfig).rightOrException() - (foreachIdent, actionQueryAst, batchActionType, planter.fieldGetters.asInstanceOf[List[InjectableEagerPlanter[?, PrepareRow, Session]]]) + val (foreachIdent, actionQueryAst, batchActionType) = + PrepareBatchComponents[I, PrepareRow](ast, planter.fieldClass, extractionBehavior, traceConfig) + .rightOrException() + ( + foreachIdent, + actionQueryAst, + batchActionType, + planter.fieldGetters.asInstanceOf[List[InjectableEagerPlanter[?, PrepareRow, Session]]] + ) // In the case of liftQuery(scalars) // Note, we could potentially have other liftQuery(scalars) later in the query, for example: // liftQuery(List("Joe","Jack","Jill")).foreach(name => query[Person].filter(p => liftQuery(1,2,3 /*ids of Joe,Jack,Jill respectively*/).contains(p.id)).update(_.name -> name)) @@ -231,10 +260,19 @@ case PlanterKind.PrimaryScalarList(planter) => val uuid = java.util.UUID.randomUUID.toString val (foreachReplacementAst, perRowLift) = - (ScalarTag(uuid, Source.Parser), InjectableEagerPlanter((t: Any) => t, planter.encoder.asInstanceOf[io.getquill.generic.GenericEncoder[Any, PrepareRow, Session]], uuid)) + ( + ScalarTag(uuid, Source.Parser), + InjectableEagerPlanter( + (t: Any) => t, + planter.encoder.asInstanceOf[io.getquill.generic.GenericEncoder[Any, PrepareRow, Session]], + uuid + ) + ) // create the full batch-query Ast using the value of the actual query of the batch statement i.e.
I in: // liftQuery[...](...).foreach(p => query[I].insertValue(p)) - val (foreachIdent, actionQueryAst, batchActionType) = PrepareBatchComponents[I, PrepareRow](ast, foreachReplacementAst, extractionBehavior, traceConfig).rightOrException() + val (foreachIdent, actionQueryAst, batchActionType) = + PrepareBatchComponents[I, PrepareRow](ast, foreachReplacementAst, extractionBehavior, traceConfig) + .rightOrException() // return the combined batch components (foreachIdent, actionQueryAst, batchActionType, List(perRowLift)) } diff --git a/quill-sql/src/main/scala/io/getquill/context/QueryExecutionBatchIteration.scala b/quill-sql/src/main/scala/io/getquill/context/QueryExecutionBatchIteration.scala index 1608041f3..4cb589b77 100644 --- a/quill-sql/src/main/scala/io/getquill/context/QueryExecutionBatchIteration.scala +++ b/quill-sql/src/main/scala/io/getquill/context/QueryExecutionBatchIteration.scala @@ -5,7 +5,7 @@ import scala.language.experimental.macros import java.io.Closeable import scala.compiletime.summonFrom import scala.util.Try -import io.getquill.{ReturnAction} +import io.getquill.ReturnAction import io.getquill.generic.EncodingDsl import io.getquill.Quoted import io.getquill.QueryMeta @@ -73,15 +73,15 @@ object QueryExecutionBatchIteration { private[getquill] val logger = ContextLogger(classOf[QueryExecutionBatchIteration.type]) def apply[PrepareRow, Session]( - idiom: io.getquill.idiom.Idiom, - query: Unparticular.Query, - perRowLifts: List[SingleEntityLifts], - otherLifts: List[Planter[?, ?, ?]], - originalEntityLifts: List[InjectableEagerPlanter[_, _, _]], - liftingPlaceholder: Int => String, - emptyContainsToken: Token => Token, - batchingBehavior: BatchingBehavior, - extractBehavior: BatchExtractBehavior + idiom: io.getquill.idiom.Idiom, + query: Unparticular.Query, + perRowLifts: List[SingleEntityLifts], + otherLifts: List[Planter[?, ?, ?]], + originalEntityLifts: List[InjectableEagerPlanter[_, _, _]], + liftingPlaceholder: Int => String, + emptyContainsToken: Token => Token, + batchingBehavior: BatchingBehavior, + extractBehavior: BatchExtractBehavior )(traceConfig: TraceConfig): List[(String, List[(PrepareRow, Session) => (List[Any], PrepareRow)])] = new Executor( idiom, @@ -97,16 +97,16 @@ object QueryExecutionBatchIteration { ).apply() private[getquill] class Executor[PrepareRow, Session]( - idiom: io.getquill.idiom.Idiom, - query: Unparticular.Query, - perRowLifts: List[SingleEntityLifts], - otherLifts: List[Planter[?, ?, ?]], - originalEntityLifts: List[InjectableEagerPlanter[_, _, _]], - liftingPlaceholder: Int => String, - emptyContainsToken: Token => Token, - batchingBehavior: BatchingBehavior, - extractBehavior: BatchExtractBehavior, - traceConfig: TraceConfig + idiom: io.getquill.idiom.Idiom, + query: Unparticular.Query, + perRowLifts: List[SingleEntityLifts], + otherLifts: List[Planter[?, ?, ?]], + originalEntityLifts: List[InjectableEagerPlanter[_, _, _]], + liftingPlaceholder: Int => String, + emptyContainsToken: Token => Token, + batchingBehavior: BatchingBehavior, + extractBehavior: BatchExtractBehavior, + traceConfig: TraceConfig ) { def apply(): List[(String, List[(PrepareRow, Session) => (List[Any], PrepareRow)])] = batchingBehavior match { @@ -152,14 +152,21 @@ object QueryExecutionBatchIteration { // Query1: INSERT INTO Person (name, age) VALUES ('Joe', 22), ('Jack', 33), ('Jill', 44) WHERE something=liftedValue // We will have just one group: // Query1: sing:[ SingleEntityLifts([l:'Joe', l:22]), SingleEntityLifts([l:'Jack', l:33]), 
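(Editor's sketch, not part of the patch: the grouping described in the comments here reduces to simple arithmetic; a standalone illustration with hypothetical numbers.)

    val totalEntityCount = 2200       // rows passed to liftQuery(...)
    val numEntitiesPerQuery = 1000    // VALUES-clause rows a single query may carry
    val fullQueries = totalEntityCount / numEntitiesPerQuery          // 2 queries of 1000 rows each
    val lastQueryEntityCount = totalEntityCount % numEntitiesPerQuery // 1 trailing query of 200 rows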
SingleEntityLifts([l:'Jill', l:44]) ], otherLifts:[liftedValue] - def concatenatedRowIteration(numEntitiesPerQuery: Int): List[(String, List[(PrepareRow, Session) => (List[Any], PrepareRow)])] = { + def concatenatedRowIteration( + numEntitiesPerQuery: Int + ): List[(String, List[(PrepareRow, Session) => (List[Any], PrepareRow)])] = { val totalEntityCount = perRowLifts.length - val templateOfLifts = originalEntityLifts ++ otherLifts + val templateOfLifts = originalEntityLifts ++ otherLifts // if (entitiesCount <= batchSize) // batch(single)(entitiesSize%batchSize) if (totalEntityCount <= numEntitiesPerQuery) { - val (singleGroupQuery, liftsOrderer) = Particularize.Dynamic(query, templateOfLifts, liftingPlaceholder, emptyContainsToken, /*valueClauseRepeats*/ totalEntityCount)(traceConfig) + val (singleGroupQuery, liftsOrderer) = Particularize.Dynamic( + query, + templateOfLifts, + liftingPlaceholder, + emptyContainsToken, /*valueClauseRepeats*/ totalEntityCount + )(traceConfig) // Since the entire query will fit into one batch, we don't need to subdivide the batches // just make prepares based on all of the lifts @@ -178,9 +185,9 @@ // The 1st and 2nd that insert 1000 rows each, that's the queryForMostGroups // The 3rd which only inserts 200 i.e. 2200 % batchSize else { - val (anteriorQuery, anteriorLiftsOrderer) = Particularize.Dynamic(query, templateOfLifts, liftingPlaceholder, emptyContainsToken, numEntitiesPerQuery)(traceConfig) + val (anteriorQuery, anteriorLiftsOrderer) = + Particularize.Dynamic(query, templateOfLifts, liftingPlaceholder, emptyContainsToken, numEntitiesPerQuery)( + traceConfig + ) val lastQueryEntityCount = totalEntityCount % numEntitiesPerQuery - val (lastQuery, lastLiftsOrderer) = Particularize.Dynamic(query, templateOfLifts, liftingPlaceholder, emptyContainsToken, lastQueryEntityCount)(traceConfig) + val (lastQuery, lastLiftsOrderer) = + Particularize.Dynamic(query, templateOfLifts, liftingPlaceholder, emptyContainsToken, lastQueryEntityCount)( + traceConfig + ) // println(s"Most Queries: ${numEntitiesPerQuery} Entities, Last Query: ${lastQueryEntityCount} Entities") // Say you have `liftQuery(A,B,C,D,E).foreach(...)` and numEntitiesPerQuery:=2 you need to do the following: @@ -208,7 +221,7 @@ LiftsExtractor.apply[PrepareRow, Session](liftsInThisGroup, row, session) } val lastPrepare = { - val lastEntities = groupedLifts.last + val lastEntities = groupedLifts.last val liftsInThisGroup = lastLiftsOrderer.orderLifts(lastEntities, otherLifts) (row: PrepareRow, session: Session) => LiftsExtractor.apply[PrepareRow, Session](liftsInThisGroup, row, session) @@ -245,9 +258,9 @@ else Left( s"""|The dialect ${idiom.getClass.getName} does not support inserting multiple rows-per-batch (e.g. it cannot support multiple VALUES clauses). - |Currently this functionality is only supported for INSERT queries for select databases (Postgres, H2, SQL Server, Sqlite). - |Falling back to the regular single-row-per-batch insert behavior. - |""".stripMargin + |Currently this functionality is only supported for INSERT queries for select databases (Postgres, H2, SQL Server, Sqlite). + |Falling back to the regular single-row-per-batch insert behavior.
+ |""".stripMargin ) } @@ -255,7 +268,9 @@ object QueryExecutionBatchIteration { import io.getquill.context.InsertValueMulti val hasCapability = if (idiom.isInstanceOf[IdiomInsertReturningValueCapability]) - idiom.asInstanceOf[IdiomInsertReturningValueCapability].idiomInsertReturningValuesCapability == InsertReturningValueMulti + idiom + .asInstanceOf[IdiomInsertReturningValueCapability] + .idiomInsertReturningValuesCapability == InsertReturningValueMulti else false @@ -264,10 +279,10 @@ object QueryExecutionBatchIteration { else Left( s"""|The dialect ${idiom.getClass.getName} does not support inserting multiple rows-per-batch (e.g. it cannot support multiple VALUES clauses) - |when batching with query-returns and/or generated-keys. - |Currently this functionality is only supported for INSERT queries for select databases (Postgres, H2, SQL Server). - |Falling back to the regular single-row-per-batch insert-returning behavior. - |""".stripMargin + |when batching with query-returns and/or generated-keys. + |Currently this functionality is only supported for INSERT queries for select databases (Postgres, H2, SQL Server). + |Falling back to the regular single-row-per-batch insert-returning behavior. + |""".stripMargin ) } @@ -290,9 +305,9 @@ object QueryExecutionBatchIteration { else Left( s"""|Cannot insert multiple (i.e. ${entitiesPerQuery}) rows per-batch-query since the query ${query.basicQuery} has no VALUES clause. - |Currently this functionality is only supported for INSERT queries for select databases (Postgres, H2, SQL Server, Sqlite). - |Falling back to the regular single-row-per-batch insert behavior. - |""".stripMargin + |Currently this functionality is only supported for INSERT queries for select databases (Postgres, H2, SQL Server, Sqlite). + |Falling back to the regular single-row-per-batch insert behavior. + |""".stripMargin ) } @@ -300,15 +315,16 @@ object QueryExecutionBatchIteration { val numEntitiesInAllQueries = 1 // Since every batch consists of one row inserted, can use the original InjectableEagerPlanter here to Particularize (i.e. insert the right number of '?' 
into) the query val liftsInAllGroups = originalEntityLifts ++ otherLifts - val (allGroupsQuery, liftsOrderer) = Particularize.Dynamic(query, liftsInAllGroups, liftingPlaceholder, emptyContainsToken, numEntitiesInAllQueries)(traceConfig) + val (allGroupsQuery, liftsOrderer) = + Particularize.Dynamic(query, liftsInAllGroups, liftingPlaceholder, emptyContainsToken, numEntitiesInAllQueries)( + traceConfig + ) val prepares = - perRowLifts.map { - liftsInThisGroup => - val orderedLifts = liftsOrderer.orderLifts(List(liftsInThisGroup), otherLifts) - { - (row: PrepareRow, session: Session) => - LiftsExtractor.apply[PrepareRow, Session](orderedLifts, row, session) - } + perRowLifts.map { liftsInThisGroup => + val orderedLifts = liftsOrderer.orderLifts(List(liftsInThisGroup), otherLifts) + (row: PrepareRow, session: Session) => + LiftsExtractor.apply[PrepareRow, Session](orderedLifts, row, session) } List((allGroupsQuery, prepares)) } diff --git a/quill-sql/src/main/scala/io/getquill/context/QueryMetaExtractor.scala b/quill-sql/src/main/scala/io/getquill/context/QueryMetaExtractor.scala index 3443ce884..e6bebc658 100644 --- a/quill-sql/src/main/scala/io/getquill/context/QueryMetaExtractor.scala +++ b/quill-sql/src/main/scala/io/getquill/context/QueryMetaExtractor.scala @@ -7,7 +7,7 @@ import scala.language.experimental.macros import java.io.Closeable import scala.compiletime.summonFrom import scala.util.Try -import io.getquill.{ReturnAction} +import io.getquill.ReturnAction import io.getquill.generic.EncodingDsl import io.getquill.Quoted import io.getquill.QueryMeta @@ -30,37 +30,31 @@ import io.getquill.context.Execution.ElaborationBehavior import io.getquill.quat.Quat /** - * A QueryMeta allows contra-mapping some Query[T] to a combination of a Query[R] and then - * an extractor R => T. That is to say a function Query[T] => Query[R] and R => T function - * is automatically swapped in for a Query[T]. + * A QueryMeta allows contra-mapping some Query[T] to a combination of a + * Query[R] and then an extractor R => T. That is to say a function Query[T] => + * Query[R] and R => T function is automatically swapped in for a Query[T]. * - * Internally, we use the term 'quip' (i.e. query + flip) to mean the QueryMeta construct, - * The Query[T] => Query[R] function itself is called the Quipper. - * Since a QueryMeta comes with an R=>M contramap - * function to apply to an extractor we call that the 'baq' since it mapps the inner query back - * from R to T. + * Internally, we use the term 'quip' (i.e. query + flip) to mean the QueryMeta + * construct. The Query[T] => Query[R] function itself is called the Quipper. + * Since a QueryMeta comes with an R => T contramap function to apply to an + * extractor we call that the 'baq' since it maps the inner query back from R + * to T. * - * Once the quip is summoned, it is applied to the original user-created query and then called - * a requip (i.e. re-applied quip). That it to say the requip is: - * `FunctionApply(Query[T] => Query[R], Query[R])` + * Once the quip is summoned, it is applied to the original user-created query + * and then called a requip (i.e. re-applied quip). That is to say the requip + * is: `FunctionApply(Query[T] => Query[R], Query[R])` * - * Note that since internally, a QueryMeta carries a Quoted instance, the QueryMeta itself - * is a QuotationLot. For that reason, we call the whole QueryMeta structure a quip-lot.
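(Editor's sketch, not part of the patch: a line-by-line version of the quip example from this scaladoc, against the mirror context; PersonName and ctx are hypothetical.)

    import io.getquill._
    case class PersonName(name: String)
    val ctx = new SqlMirrorContext(PostgresDialect, Literal)
    import ctx._
    inline given QueryMeta[PersonName, String] =
      queryMeta(
        quote { (q: Query[PersonName]) => q.map(p => p.name) } // the Quipper
      )((name: String) => PersonName(name))                    // the Baq
    inline def people = quote(query[PersonName])
    val result = ctx.run(people) // runs the rewritten Query[String]; each row is re-wrapped as PersonName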
- * (Metaphorically speaking, a 'lot' meta real-estate containing a quip) + * Note that since internally, a QueryMeta carries a Quoted instance, the + * QueryMeta itself is a QuotationLot. For that reason, we call the whole + * QueryMeta structure a quip-lot. (Metaphorically speaking, a 'lot' of + * real-estate containing a quip) * - * Given a PersonName(name: String) we can define a QueryMeta like this: - * {{ - * inline given QueryMeta[PersonName, String] = - * queryMeta( - * quote { (q: Query[PersonName]) => q.map(p => p.name) } // The Quipper - * )((name: String) => PersonName(name)) // The Baq - * }} - * When we do something like: - * {{ - * inline def people = quote { query[PersonName] } - * val result = ctx.run(people) - * }} - * The Query-Lot AST becomes EntityQuery("Person") + * Given a PersonName(name: String) we can define a QueryMeta like this: + * {{ + * inline given QueryMeta[PersonName, String] = + * queryMeta( + * quote { (q: Query[PersonName]) => q.map(p => p.name) } // The Quipper + * )((name: String) => PersonName(name)) // The Baq + * }} + * When we do something like: + * {{ + * inline def people = quote { query[PersonName] } + * val result = ctx.run(people) + * }} + * The Query-Lot AST becomes EntityQuery("Person") */ object QueryMetaExtractor { import io.getquill.parser._ @@ -84,9 +78,9 @@ case class StaticRequip[T, R](requip: Expr[Quoted[Query[R]]], baq: Expr[R => T]) def attemptStaticRequip[T: Type, R: Type]( - queryLot: QuotedExpr, - queryLifts: List[PlanterExpr[_, _, _]], - quip: Expr[QueryMeta[T, R]] + queryLot: QuotedExpr, + queryLifts: List[PlanterExpr[_, _, _]], + quip: Expr[QueryMeta[T, R]] )(using Quotes): Option[StaticRequip[T, R]] = { import quotes.reflect.report @@ -127,7 +121,9 @@ // which means that the Context will require a parser as well.
That will // make the parser harder to customize by users val reappliedQuery = - '{ Quoted[Query[R]]($astApply, ${ Expr.ofList(newLifts) }, Nil) } // has to be strictly Nil otherwise does not match + '{ + Quoted[Query[R]]($astApply, ${ Expr.ofList(newLifts) }, Nil) + } // has to be strictly Nil otherwise does not match val extractorFunc = '{ $baq.asInstanceOf[R => T] } @@ -141,11 +137,11 @@ } def applyImpl[T: Type, R: Type, D <: io.getquill.idiom.Idiom: Type, N <: io.getquill.NamingStrategy: Type]( - quotedRaw: Expr[Quoted[Query[T]]], - topLevelQuat: Quat + quotedRaw: Expr[Quoted[Query[T]]], + topLevelQuat: Quat )(using Quotes): (Expr[Quoted[Query[R]]], Expr[R => T], Option[StaticState]) = { import quotes.reflect.{Try => TTry, _} - val quotedArg = quotedRaw.asTerm.underlyingArgument.asExprOf[Quoted[Query[T]]] + val quotedArg = quotedRaw.asTerm.underlyingArgument.asExprOf[Quoted[Query[T]]] val summonedMeta = Expr.summon[QueryMeta[T, R]].map(x => x.asTerm.underlyingArgument.asExprOf[QueryMeta[T, R]]) summonedMeta match { case Some(quip) => @@ -156,17 +152,26 @@ attemptStaticRequip[T, R](queryLot, queryLifts, quip) match { case Some(StaticRequip(requip, baq)) => - val staticTranslation = StaticTranslationMacro[D, N](requip, ElaborationBehavior.Elaborate, topLevelQuat) + val staticTranslation = + StaticTranslationMacro[D, N](requip, ElaborationBehavior.Elaborate, topLevelQuat) (requip, baq, staticTranslation) case None => - report.warning(s"Query Was Static but a Dynamic Meta was found: `${io.getquill.util.Format.Expr(quip)}`.This has forced the query to become dynamic!") + report.warning( + s"Query Was Static but a Dynamic Meta was found: `${io.getquill.util.Format.Expr(quip)}`. This has forced the query to become dynamic!"
+ ) val reappliedAst = '{ FunctionApply($quip.entity.ast, List($quotedArg.ast)) } val requip = - '{ Quoted[Query[R]]($reappliedAst, $quip.entity.lifts ++ $quotedArg.lifts, $quip.entity.runtimeQuotes ++ $quotedArg.runtimeQuotes) } + '{ + Quoted[Query[R]]( + $reappliedAst, + $quip.entity.lifts ++ $quotedArg.lifts, + $quip.entity.runtimeQuotes ++ $quotedArg.runtimeQuotes + ) + } (requip, '{ $quip.extract }, None) } @@ -176,7 +181,13 @@ object QueryMetaExtractor { '{ FunctionApply($quip.entity.ast, List($quotedArg.ast)) } val requip = - '{ Quoted[Query[R]]($reappliedAst, $quip.entity.lifts ++ $quotedArg.lifts, $quip.entity.runtimeQuotes ++ $quotedArg.runtimeQuotes) } + '{ + Quoted[Query[R]]( + $reappliedAst, + $quip.entity.lifts ++ $quotedArg.lifts, + $quip.entity.runtimeQuotes ++ $quotedArg.runtimeQuotes + ) + } (requip, '{ $quip.extract }, None) } diff --git a/quill-sql/src/main/scala/io/getquill/context/QueryMetaMacro.scala b/quill-sql/src/main/scala/io/getquill/context/QueryMetaMacro.scala index 4a148870b..96c089fff 100644 --- a/quill-sql/src/main/scala/io/getquill/context/QueryMetaMacro.scala +++ b/quill-sql/src/main/scala/io/getquill/context/QueryMetaMacro.scala @@ -5,7 +5,9 @@ import io.getquill.parser.ParserFactory import io.getquill._ object QueryMetaMacro { - def embed[T: Type, R: Type](expand: Expr[Quoted[Query[T] => Query[R]]], extract: Expr[R => T])(using Quotes): Expr[QueryMeta[T, R]] = { + def embed[T: Type, R: Type](expand: Expr[Quoted[Query[T] => Query[R]]], extract: Expr[R => T])(using + Quotes + ): Expr[QueryMeta[T, R]] = { val uuid = Expr(java.util.UUID.randomUUID().toString) '{ QueryMeta[T, R]($expand, $uuid, $extract) } } diff --git a/quill-sql/src/main/scala/io/getquill/context/QuoteMacro.scala b/quill-sql/src/main/scala/io/getquill/context/QuoteMacro.scala index e3f8f9a9e..d62deeb4c 100644 --- a/quill-sql/src/main/scala/io/getquill/context/QuoteMacro.scala +++ b/quill-sql/src/main/scala/io/getquill/context/QuoteMacro.scala @@ -50,12 +50,11 @@ object ExtractLifts { def extractRuntimeUnquotes(body: Expr[Any])(using Quotes) = { import quotes.reflect.report val unquotes = QuotationLotExpr.findUnquotes(body) - unquotes - .collect { - case expr: Pluckable => expr - case Pointable(expr) => - report.throwError(s"Invalid runtime Quotation: ${expr.show}. Cannot extract a unique identifier.", expr) - } + unquotes.collect { + case expr: Pluckable => expr + case Pointable(expr) => + report.throwError(s"Invalid runtime Quotation: ${expr.show}. 
Cannot extract a unique identifier.", expr) + } .distinctBy(_.uid) .map(_.pluck) } @@ -101,21 +100,23 @@ } val (newAst, transformer) = Transform(List())(ast) - val extracted = transformer.state.reverse + val extracted = transformer.state.reverse val quotations = - extracted.map { - case Extractee(uid, Dynamic(value, quat)) => - val quotation = - value match { - case expr: Expr[_] if (is[Quoted[_]](expr)) => - expr.asExprOf[Quoted[_]] - case expr: Expr[_] => - report.throwError(s"Dynamic value has invalid expression: ${Format.Expr(expr)} in the AST:\n${printAstWithCustom(newAst)(uid, "")}") - case other => - report.throwError(s"Dynamic value is not an expression: ${other} in the AST:\n${printAstWithCustom(newAst)(uid, "")}") - } - - '{ QuotationVase($quotation, ${ Expr(uid) }) } + extracted.map { case Extractee(uid, Dynamic(value, quat)) => + val quotation = + value match { + case expr: Expr[_] if (is[Quoted[_]](expr)) => + expr.asExprOf[Quoted[_]] + case expr: Expr[_] => + report.throwError(s"Dynamic value has invalid expression: ${Format + .Expr(expr)} in the AST:\n${printAstWithCustom(newAst)(uid, "")}") + case other => + report.throwError( + s"Dynamic value is not an expression: ${other} in the AST:\n${printAstWithCustom(newAst)(uid, "")}" + ) + } + + '{ QuotationVase($quotation, ${ Expr(uid) }) } } (newAst, quotations) @@ -128,16 +129,16 @@ // NOTE Can disable underlyingArgument here if needed and make body = bodyRaw. See https://github.com/lampepfl/dotty/pull/8041 for detail val body = bodyRaw.asTerm.underlyingArgument.asExpr - val parser = SummonParser().assemble + val parser = SummonParser().assemble val (serializeQuats, serializeAst) = SummonSerializationBehaviors() - given TranspileConfig = SummonTranspileConfig() + given TranspileConfig = SummonTranspileConfig() - val rawAst = parser(body) + val rawAst = parser(body) val (noDynamicsAst, dynamicQuotes) = DynamicsExtractor(rawAst) - val ast = SimplifyFilterTrue(BetaReduction(noDynamicsAst)) + val ast = SimplifyFilterTrue(BetaReduction(noDynamicsAst)) val reifiedAst = Lifter.WithBehavior(serializeQuats, serializeAst)(ast) - val u = Unlifter(reifiedAst) + val u = Unlifter(reifiedAst) // Extract runtime quotes and lifts val (lifts, pluckedUnquotes) = ExtractLifts(bodyRaw) diff --git a/quill-sql/src/main/scala/io/getquill/context/ReflectiveChainLookup.scala b/quill-sql/src/main/scala/io/getquill/context/ReflectiveChainLookup.scala index 0c30f9f85..cd119a39c 100644 --- a/quill-sql/src/main/scala/io/getquill/context/ReflectiveChainLookup.scala +++ b/quill-sql/src/main/scala/io/getquill/context/ReflectiveChainLookup.scala @@ -12,7 +12,7 @@ import io.getquill.util.Format private[getquill] object ReflectivePathChainLookup { sealed trait LookupElement { def cls: Class[_]; def current: Object } - object LookupElement { + object LookupElement { // For a module class the lookup-object is actually a class. For example // for: object Foo { object Bar { ... } } you would do: // val submod: Class[Bar] = Class[Foo].getDeclaredClasses.find(_.name endsWith "Bar$") @@ -24,7 +24,7 @@ } // end LookupElement case class LookupPath(element: LookupElement, path: String) { - def cls = element.cls + def cls = element.cls def current = element.current } @@ -71,10 +71,12 @@ // Get Foo.MODULE$ val submodOpt: Option[Object] = lookupModuleObject(lookup.current)(lookup.cls) // Get Foo.MODULE$.fields.
The `Field` unapply can be recycled for this purpose - submodOpt.map(submod => - // I.e. lookup MODULE$.field - lookupFirstMethod(lookup.path)(lookup.cls, submod)("$MODULE.field").map(LookupElement.Value(_)) - ).flatten + submodOpt + .map(submod => + // I.e. lookup MODULE$.field + lookupFirstMethod(lookup.path)(lookup.cls, submod)("$MODULE.field").map(LookupElement.Value(_)) + ) + .flatten } } @@ -83,10 +85,12 @@ // Get Foo.MODULE$ val submodOpt: Option[Object] = lookupModuleObject(lookup.current)(lookup.cls) // Get Foo.MODULE$.methods. The `Method` unapply can be recycled for this purpose - submodOpt.map(submod => - // I.e. lookup MODULE$.method - lookupFirstMethod(lookup.path)(lookup.cls, submod)("$MODULE.method").map(LookupElement.Value(_)) - ).flatten + submodOpt + .map(submod => + // I.e. lookup MODULE$.method + lookupFirstMethod(lookup.path)(lookup.cls, submod)("$MODULE.method").map(LookupElement.Value(_)) + ) + .flatten } } @@ -117,7 +121,9 @@ case _ => None } - def chainLookup(element: LookupElement, paths: List[String])(pathsSeen: List[String] = List()): Either[String, LookupElement] = { + def chainLookup(element: LookupElement, paths: List[String])( + pathsSeen: List[String] = List() + ): Either[String, LookupElement] = { import StringOps._ paths match { case Nil => Right(element) diff --git a/quill-sql/src/main/scala/io/getquill/context/SchemaMetaMacro.scala b/quill-sql/src/main/scala/io/getquill/context/SchemaMetaMacro.scala index 6f159b532..0029def02 100644 --- a/quill-sql/src/main/scala/io/getquill/context/SchemaMetaMacro.scala +++ b/quill-sql/src/main/scala/io/getquill/context/SchemaMetaMacro.scala @@ -12,7 +12,10 @@ object SchemaMetaMacro { // inline def schemaMeta[T](inline entity: String, inline columns: (T => (Any, String))*): SchemaMeta[T] = // SchemaMeta(quote { querySchema[T](entity, columns: _*) }, "1234") // TODO Don't need to generate a UID here. It can be static.
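(Editor's sketch, not part of the patch: the user-level API this macro implements, against the mirror context; the table and column names are hypothetical.)

    import io.getquill._
    case class Person(firstName: String, age: Int)
    val ctx = new SqlMirrorContext(PostgresDialect, Literal)
    import ctx._
    // maps query[Person] onto person_t(first_name, age)
    inline given SchemaMeta[Person] = schemaMeta[Person]("person_t", _.firstName -> "first_name")
    val result = ctx.run(query[Person])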
- def apply[T](entity: Expr[String], columns: Expr[Seq[(T => (Any, String))]])(using Quotes, Type[T]): Expr[SchemaMeta[T]] = { + def apply[T](entity: Expr[String], columns: Expr[Seq[(T => (Any, String))]])(using + Quotes, + Type[T] + ): Expr[SchemaMeta[T]] = { val uuid = Expr(java.util.UUID.randomUUID().toString) val exprs = (columns match { diff --git a/quill-sql/src/main/scala/io/getquill/context/SplicingBehavior.scala b/quill-sql/src/main/scala/io/getquill/context/SplicingBehavior.scala index 0d56db17a..5db39dbfd 100644 --- a/quill-sql/src/main/scala/io/getquill/context/SplicingBehavior.scala +++ b/quill-sql/src/main/scala/io/getquill/context/SplicingBehavior.scala @@ -5,9 +5,9 @@ import scala.quoted._ sealed trait SplicingBehavior object SplicingBehavior { sealed trait FailOnDynamic extends SplicingBehavior - case object FailOnDynamic extends FailOnDynamic - sealed trait AllowDynamic extends SplicingBehavior - case object AllowDynamic extends AllowDynamic + case object FailOnDynamic extends FailOnDynamic + sealed trait AllowDynamic extends SplicingBehavior + case object AllowDynamic extends AllowDynamic } trait SplicingBehaviorHint { diff --git a/quill-sql/src/main/scala/io/getquill/context/StaticSpliceMacro.scala b/quill-sql/src/main/scala/io/getquill/context/StaticSpliceMacro.scala index bef0fdcd5..872bdea17 100644 --- a/quill-sql/src/main/scala/io/getquill/context/StaticSpliceMacro.scala +++ b/quill-sql/src/main/scala/io/getquill/context/StaticSpliceMacro.scala @@ -23,7 +23,9 @@ object StaticSpliceMacro { import Extractors._ private[getquill] object SelectPath { - def recurseInto(using Quotes)(term: quotes.reflect.Term, accum: List[String] = List()): Option[(quotes.reflect.Term, List[String])] = { + def recurseInto(using + Quotes + )(term: quotes.reflect.Term, accum: List[String] = List()): Option[(quotes.reflect.Term, List[String])] = { import quotes.reflect._ term match { // Recurses through a series of selects to the core identifier e.g: @@ -67,7 +69,9 @@ def isModule(using Quotes)(sym: quotes.reflect.Symbol) = { import quotes.reflect._ val f = sym.flags - f.is(Flags.Module) && !f.is(Flags.Package) && !f.is(Flags.Param) && !f.is(Flags.ParamAccessor) && !f.is(Flags.Method) + f.is(Flags.Module) && !f.is(Flags.Package) && !f.is(Flags.Param) && !f.is(Flags.ParamAccessor) && !f.is( + Flags.Method + ) } object TermIsModule { @@ -116,7 +120,9 @@ case SelectPath(pathRoot, selectPath) => (pathRoot, selectPath) case other => // TODO Long explanatory message about how it has to be some value inside object foo inside object bar... and it needs to be a thing compiled in a previous compilation unit - report.throwError(s"Could not load a static value `${Format.Term(value)}` from ${Printer.TreeStructure.show(other)}") + report.throwError( + s"Could not load a static value `${Format.Term(value)}` from ${Printer.TreeStructure.show(other)}" + ) } val (ownerTpe, path) = @@ -127,13 +133,20 @@ case term @ DefTerm(TermOwnerIsModule(owner)) => (owner, pathRoot.symbol.name +: selectPath) case _ => - report.throwError(s"Cannot evaluate the static path ${Format.Term(value)}. Neither it's type ${Format.TypeRepr(pathRoot.tpe)} nor the owner of this type is a static module.") + report.throwError( + s"Cannot evaluate the static path ${Format.Term(value)}. Neither its type ${Format.TypeRepr(pathRoot.tpe)} nor the owner of this type is a static module."
+ ) } - val module = Load.Module.fromTypeRepr(ownerTpe).toEither.discardLeft(e => - // TODO Long explanatory message about how it has to some value inside object foo inside object bar... and it needs to be a thing compiled in a previous compilation unit - report.throwError(s"Could not look up {${(ownerTpe)}}.${path.mkString(".")} from the object.\nStatic load failed due to: ${e.stackTraceToString}") - ) + val module = Load.Module + .fromTypeRepr(ownerTpe) + .toEither + .discardLeft(e => + // TODO Long explanatory message about how it has to be some value inside object foo inside object bar... and it needs to be a thing compiled in a previous compilation unit + report.throwError( + s"Could not look up {${(ownerTpe)}}.${path.mkString(".")} from the object.\nStatic load failed due to: ${e.stackTraceToString}" + ) + ) val splicedValue = ReflectivePathChainLookup(module, path).discardLeft(msg => @@ -148,8 +161,8 @@ val spliceEither = for { castSplice <- Try(splicedValue.current.asInstanceOf[T]).toEither.mapLeft(e => errorMsg(e.getMessage)) - splicer <- StringCodec.ToSql.summon[T].mapLeft(str => errorMsg(str)) - splice <- Try(splicer.toSql(castSplice)).toEither.mapLeft(e => errorMsg(e.getMessage)) + splicer <- StringCodec.ToSql.summon[T].mapLeft(str => errorMsg(str)) + splice <- Try(splicer.toSql(castSplice)).toEither.mapLeft(e => errorMsg(e.getMessage)) } yield splice val spliceStr = diff --git a/quill-sql/src/main/scala/io/getquill/context/StaticState.scala b/quill-sql/src/main/scala/io/getquill/context/StaticState.scala index fa47ea6f5..7b9902e5f 100644 --- a/quill-sql/src/main/scala/io/getquill/context/StaticState.scala +++ b/quill-sql/src/main/scala/io/getquill/context/StaticState.scala @@ -8,19 +8,20 @@ import io.getquill.metaprog.PlanterExpr import io.getquill.idiom.Idiom case class StaticState( - query: Unparticular.Query, - rawLifts: List[PlanterExpr[?, ?, ?]], - returnAction: Option[ReturnAction], - idiom: Idiom, - // For a batch query, lifts other than the one from the primary liftQuery go here. THey need to be know about separately - // in the batch query case. Should be empty & ignored for non batch cases. - secondaryLifts: List[PlanterExpr[?, ?, ?]] = List() + query: Unparticular.Query, + rawLifts: List[PlanterExpr[?, ?, ?]], + returnAction: Option[ReturnAction], + idiom: Idiom, + // For a batch query, lifts other than the one from the primary liftQuery go here. They need to be known about separately + // in the batch query case. Should be empty & ignored for non-batch cases. + secondaryLifts: List[PlanterExpr[?, ?, ?]] = List() )(queryAst: => Ast) { + /** - * Plant all the lifts and return them. - * NOTE: If this is used frequently would it be worth caching (i.e. since this object is immutable) - * and splicing them might be expensive if it is done over and over again. + * Plant all the lifts and return them. NOTE: If this is used frequently it + * might be worth caching the result (i.e. since this object is immutable), as + * splicing the lifts over and over again might be expensive.
*/ def lifts(using Quotes) = rawLifts.map(_.plant) - def ast: Ast = queryAst + def ast: Ast = queryAst } diff --git a/quill-sql/src/main/scala/io/getquill/context/StaticTranslationMacro.scala b/quill-sql/src/main/scala/io/getquill/context/StaticTranslationMacro.scala index 0017dd636..e0036302e 100644 --- a/quill-sql/src/main/scala/io/getquill/context/StaticTranslationMacro.scala +++ b/quill-sql/src/main/scala/io/getquill/context/StaticTranslationMacro.scala @@ -5,7 +5,7 @@ import scala.language.experimental.macros import java.io.Closeable import scala.compiletime.summonFrom import scala.util.Try -import io.getquill.{ReturnAction} +import io.getquill.ReturnAction import io.getquill.generic.EncodingDsl import io.getquill.Quoted import io.getquill.QueryMeta @@ -51,12 +51,12 @@ object StaticTranslationMacro { // Process the AST during compile-time. Return `None` if that can't be done. private[getquill] def processAst( - astExpr: Expr[Ast], - topLevelQuat: Quat, - wrap: ElaborationBehavior, - idiom: Idiom, - naming: NamingStrategy, - foreachIdent: Option[Ident] // identifier of a batch query, if this is a batch query + astExpr: Expr[Ast], + topLevelQuat: Quat, + wrap: ElaborationBehavior, + idiom: Idiom, + naming: NamingStrategy, + foreachIdent: Option[Ident] // identifier of a batch query, if this is a batch query )(using Quotes): Option[(Unparticular.Query, List[External], Option[ReturnAction], Ast)] = { import io.getquill.ast.{CollectAst, QuotationTag} @@ -66,12 +66,12 @@ object StaticTranslationMacro { val unliftedAst = VerifyFreeVariables(Unlifter(astExpr)) val idiomContext = { val transpileConfig = SummonTranspileConfig() - val queryType = IdiomContext.QueryType.discoverFromAst(unliftedAst, foreachIdent.map(_.name)) + val queryType = IdiomContext.QueryType.discoverFromAst(unliftedAst, foreachIdent.map(_.name)) IdiomContext(transpileConfig, queryType) } if (noRuntimeQuotations(unliftedAst)) { - val expandedAst = ElaborateTrivial(wrap)(unliftedAst) + val expandedAst = ElaborateTrivial(wrap)(unliftedAst) val (ast, stmt, _) = idiom.translate(expandedAst, topLevelQuat, ExecutionType.Static, idiomContext)(using naming) val liftColumns = @@ -97,25 +97,28 @@ object StaticTranslationMacro { } // end processAst /** - * There are some cases where we actually do not want to use all of the lifts in a Quoted. - * For example: - * {{ query[Person].insert(_.id -> lift(1), _.name -> lift("Joe")).returningGenerated(_.id)) }} - * becomes something like: - * {{ Quoted(query[Person].insert(_.id -> lift(A), _.name -> lift(B)).returningGenerated(_.id)), lifts: List(ScalarTag(A, 1), ScalarTag(B, "Joe"))) }} - * but since we are excluding the person.id column (this is done in the transformation phase NormalizeReturning which is in SqlNormalization in the quill-sql-portable module) - * actually we only want only the ScalarTag(B) so we need to get the list of lift tags (in tokens) once the Dialect has serialized the query - * which correctly order the list of lifts. A similar issue happens with insertMeta and updateMeta. - * Process compile-time lifts, return `None` if that can't be done. - * liftExprs = Lifts that were put into planters during the quotation. They are - * 're-planted' back into the PreparedStatement vars here. - * matchingExternals = the matching placeholders (i.e 'lift tags') in the AST - * that contains the UUIDs of lifted elements. We check against list to make - * sure that that only needed lifts are used and in the right order. 
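(Editor's sketch, not part of the patch: the lift-exclusion case this scaladoc describes, in brief, against the mirror context; Person is hypothetical and the rendered SQL is dialect-dependent.)

    import io.getquill._
    case class Person(id: Int, name: String)
    val ctx = new SqlMirrorContext(PostgresDialect, Literal)
    import ctx._
    // Two lifts are planted, but NormalizeReturning drops the generated id column,
    // so the serialized query keeps only the lift for name, e.g.:
    //   INSERT INTO Person (name) VALUES (?)
    val r = ctx.run(query[Person].insert(_.id -> lift(1), _.name -> lift("Joe")).returningGenerated(_.id))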
+ * There are some cases where we actually do not want to use all of the lifts + * in a Quoted. For example: {{ query[Person].insert(_.id -> lift(1), _.name + * -> lift("Joe")).returningGenerated(_.id)) }} becomes something like: {{ + * Quoted(query[Person].insert(_.id -> lift(A), _.name -> + * lift(B)).returningGenerated(_.id)), lifts: List(ScalarTag(A, 1), + * ScalarTag(B, "Joe"))) }} but since we are excluding the person.id column + * (this is done in the transformation phase NormalizeReturning which is in + * SqlNormalization in the quill-sql-portable module) actually we want only + * the ScalarTag(B) so we need to get the list of lift tags (in tokens) + * once the Dialect has serialized the query, which correctly orders the list of + * lifts. A similar issue happens with insertMeta and updateMeta. Process + * compile-time lifts, return `None` if that can't be done. liftExprs = Lifts + * that were put into planters during the quotation. They are 're-planted' + * back into the PreparedStatement vars here. matchingExternals = the matching + * placeholders (i.e. 'lift tags') in the AST that contain the UUIDs of lifted + * elements. We check against this list to make sure that only the needed lifts + * are used and in the right order. */ private[getquill] def processLifts( - lifts: List[PlanterExpr[_, _, _]], - matchingExternals: List[External], - secondaryLifts: List[PlanterExpr[_, _, _]] = List() + lifts: List[PlanterExpr[_, _, _]], + matchingExternals: List[External], + secondaryLifts: List[PlanterExpr[_, _, _]] = List() )(using Quotes): Either[String, (List[PlanterExpr[_, _, _]], List[PlanterExpr[_, _, _]])] = { import quotes.reflect.report @@ -126,8 +129,8 @@ secondaryLifts.map(e => (e.uid, e)).toMap val uidsOfScalarTags = - matchingExternals.collect { - case tag: ScalarTag => tag.uid + matchingExternals.collect { case tag: ScalarTag => + tag.uid } enum UidStatus { @@ -145,23 +148,22 @@ } val sortedEncodeables = - uidsOfScalarTags - .map { uid => - encodeablesMap.get(uid) match { - case Some(element) => UidStatus.Primary(uid, element) - case None => - secondaryEncodeablesMap.get(uid) match { - case Some(element) => UidStatus.Secondary(uid, element) - case None => UidStatus.NotFound(uid) - } - } + uidsOfScalarTags.map { uid => + encodeablesMap.get(uid) match { + case Some(element) => UidStatus.Primary(uid, element) + case None => + secondaryEncodeablesMap.get(uid) match { + case Some(element) => UidStatus.Secondary(uid, element) + case None => UidStatus.NotFound(uid) + } } + } object HasNotFoundUids { def unapply(statuses: List[UidStatus]) = { val collected = - statuses.collect { - case UidStatus.NotFound(uid) => uid + statuses.collect { case UidStatus.NotFound(uid) => + uid } if (collected.nonEmpty) Some(collected) else None } @@ -174,7 +176,7 @@ case UidStatus.Primary(_, _) => true case _ => false } - val primariesFound = primaries.collect { case p: UidStatus.Primary => p } + val primariesFound = primaries.collect { case p: UidStatus.Primary => p } val secondariesFound = secondaries.collect { case s: UidStatus.Secondary => s } val goodPartitioning = primariesFound.length == primaries.length && secondariesFound.length == secondaries.length @@ -206,21 +208,25 @@ outputEncodeables } // end processLifts - def idiomAndNamingStatic[D <: Idiom,
N <: NamingStrategy](using + Quotes, + Type[D], + Type[N] + ): Try[(Idiom, NamingStrategy)] = for { - idiom <- Load.Module[D] + idiom <- Load.Module[D] namingStrategy <- LoadNaming.static[N] } yield (idiom, namingStrategy) def apply[D <: Idiom, N <: NamingStrategy]( - quotedRaw: Expr[Quoted[QAC[?, ?]]], - wrap: ElaborationBehavior, - topLevelQuat: Quat, - // Optional lifts that need to be passed in if they exist e.g. in the liftQuery(...).foreach(p => query[P].filter(pq => pq.id == lift(foo)).updateValue(p)) - // the `lift(foo)` needs to be additionally passed in because it is not part of the original lifts - additionalLifts: List[PlanterExpr[?, ?, ?]] = List(), - // Identifier of the batch query, if this is a batch query - foreachIdent: Option[Ident] = None + quotedRaw: Expr[Quoted[QAC[?, ?]]], + wrap: ElaborationBehavior, + topLevelQuat: Quat, + // Optional lifts that need to be passed in if they exist e.g. in the liftQuery(...).foreach(p => query[P].filter(pq => pq.id == lift(foo)).updateValue(p)) + // the `lift(foo)` needs to be additionally passed in because it is not part of the original lifts + additionalLifts: List[PlanterExpr[?, ?, ?]] = List(), + // Identifier of the batch query, if this is a batch query + foreachIdent: Option[Ident] = None )(using qctx: Quotes, dialectTpe: Type[D], namingType: Type[N]): Option[StaticState] = { import quotes.reflect.{Try => TTry, _} @@ -271,12 +277,16 @@ object StaticTranslationMacro { // recognized by QuotedExpr.uprootableOpt for some reason (quotedExpr, lifts) <- - QuotedExpr.uprootableWithLiftsOpt(quoted).errPrint( - s"Could not uproot (i.e. compile-time extract) the quote: `${Format.Expr(quoted)}`. Make sure it is an `inline def`. If it already is, this may be a quill error." - ) + QuotedExpr + .uprootableWithLiftsOpt(quoted) + .errPrint( + s"Could not uproot (i.e. compile-time extract) the quote: `${Format.Expr(quoted)}`. Make sure it is an `inline def`. If it already is, this may be a quill error." 
+ ) (query, externals, returnAction, ast) <- - processAst(quotedExpr.ast, topLevelQuat, wrap, idiom, naming, foreachIdent).errPrint(s"Could not process the AST:\n${Format.Expr(quotedExpr.ast)}") + processAst(quotedExpr.ast, topLevelQuat, wrap, idiom, naming, foreachIdent).errPrint( + s"Could not process the AST:\n${Format.Expr(quotedExpr.ast)}" + ) (primaryLifts, secondaryLifts) <- processLifts(lifts, externals, additionalLifts).errPrintEither( @@ -336,10 +346,9 @@ object StaticTranslationMacro { if (ProtoMessages.useStdOut) { val posValue = Position.ofMacroExpansion - val pos = s"\nat: ${posValue.sourceFile}:${posValue.startLine + 1}:${posValue.startColumn + 1}" + val pos = s"\nat: ${posValue.sourceFile}:${posValue.startLine + 1}:${posValue.startColumn + 1}" println(msg + pos) - } - else + } else report.info(msg) } // end queryPrint diff --git a/quill-sql/src/main/scala/io/getquill/context/SummonDecoderMacro.scala b/quill-sql/src/main/scala/io/getquill/context/SummonDecoderMacro.scala index de1e40aa1..70e8e4d5e 100644 --- a/quill-sql/src/main/scala/io/getquill/context/SummonDecoderMacro.scala +++ b/quill-sql/src/main/scala/io/getquill/context/SummonDecoderMacro.scala @@ -7,7 +7,7 @@ import scala.language.experimental.macros import java.io.Closeable import scala.compiletime.summonFrom import scala.util.Try -import io.getquill.{ReturnAction} +import io.getquill.ReturnAction import io.getquill.generic.EncodingDsl import io.getquill.Quoted import io.getquill.QueryMeta @@ -32,7 +32,9 @@ object SummonDecoderMacro { import scala.quoted._ // Expr.summon is actually from here import io.getquill.Planter - def apply[T: Type, ResultRow: Type, Session: Type](using Quotes): Expr[GenericDecoder[ResultRow, Session, T, DecodingType]] = { + def apply[T: Type, ResultRow: Type, Session: Type](using + Quotes + ): Expr[GenericDecoder[ResultRow, Session, T, DecodingType]] = { import quotes.reflect._ Expr.summon[GenericDecoder[ResultRow, Session, T, DecodingType.Specific]] match { case Some(decoder) => decoder diff --git a/quill-sql/src/main/scala/io/getquill/context/Unparticular.scala b/quill-sql/src/main/scala/io/getquill/context/Unparticular.scala index 556a77e7b..ab3e4849f 100644 --- a/quill-sql/src/main/scala/io/getquill/context/Unparticular.scala +++ b/quill-sql/src/main/scala/io/getquill/context/Unparticular.scala @@ -5,7 +5,7 @@ import scala.language.experimental.macros import java.io.Closeable import scala.compiletime.summonFrom import scala.util.Try -import io.getquill.{ReturnAction} +import io.getquill.ReturnAction import io.getquill.generic.EncodingDsl import io.getquill.Quoted import io.getquill.QueryMeta @@ -36,14 +36,14 @@ object Unparticular { import io.getquill.idiom._ /** - * Query with potentially non enumerate liftQuery(...) statements where set operations - * that look like this: `query[Person].filter(p => liftQuery(scalars).contains(p.name))` - * will look like this "WHERE p.name in (?)" (this is the basicQuery). - * This last "?" actually needs to be expanded - * into a comma separated list coming from the lifted list which is actualy Expr[List[T]] - * but that will be done in the Particularizee(r). The `realQuery` is a tokenized representation - * of the query that can be turned into what it actually will need to look like by the - * Particularize(r) + * Query with potentially non enumerate liftQuery(...) 
statements where set + * operations that look like this: `query[Person].filter(p => + * liftQuery(scalars).contains(p.name))` will look like this "WHERE p.name in + * (?)" (this is the basicQuery). This last "?" actually needs to be expanded + * into a comma separated list coming from the lifted list which is actually + * Expr[List[T]] but that will be done in the Particularize(r). The + * `realQuery` is a tokenized representation of the query that can be turned + * into what it actually will need to look like by the Particularize(r) */ case class Query(basicQuery: String, realQuery: Statement) object Query { @@ -59,21 +59,24 @@ private def token2string(token: Token, liftingPlaceholder: Int => String): (String, List[External]) = { @tailrec def apply( - workList: List[Token], - sqlResult: Seq[String], - liftingResult: Seq[External], - liftingSize: Int + workList: List[Token], + sqlResult: Seq[String], + liftingResult: Seq[External], + liftingSize: Int ): (String, List[External]) = workList match { case Nil => sqlResult.reverse.mkString("") -> liftingResult.reverse.toList case head :: tail => head match { case StringToken(s2) => apply(tail, s2 +: sqlResult, liftingResult, liftingSize) case SetContainsToken(a, op, b) => apply(stmt"$a $op ($b)" +: tail, sqlResult, liftingResult, liftingSize) - case ScalarTagToken(tag) => apply(tail, liftingPlaceholder(liftingSize) +: sqlResult, tag +: liftingResult, liftingSize + 1) - case Statement(tokens) => apply(tokens.foldRight(tail)(_ +: _), sqlResult, liftingResult, liftingSize) - case ValuesClauseToken(stmt) => apply(stmt +: tail, sqlResult, liftingResult, liftingSize) + case ScalarTagToken(tag) => + apply(tail, liftingPlaceholder(liftingSize) +: sqlResult, tag +: liftingResult, liftingSize + 1) + case Statement(tokens) => apply(tokens.foldRight(tail)(_ +: _), sqlResult, liftingResult, liftingSize) + case ValuesClauseToken(stmt) => apply(stmt +: tail, sqlResult, liftingResult, liftingSize) case _: ScalarLiftToken => - throw new UnsupportedOperationException("Scalar Lift Tokens are not used in Dotty Quill. Only Scalar Lift Tokens.") + throw new UnsupportedOperationException( + "Scalar Lift Tokens are not used in Dotty Quill. Only Scalar Tag Tokens are." + ) case _: QuotationTagToken => throw new UnsupportedOperationException("Quotation Tags must be resolved before a reification.") } diff --git a/quill-sql/src/main/scala/io/getquill/context/VerifyFreeVariables.scala b/quill-sql/src/main/scala/io/getquill/context/VerifyFreeVariables.scala index 469a11db6..3ee6ed306 100644 --- a/quill-sql/src/main/scala/io/getquill/context/VerifyFreeVariables.scala +++ b/quill-sql/src/main/scala/io/getquill/context/VerifyFreeVariables.scala @@ -12,17 +12,21 @@ object VerifyFreeVariables { case free if free.isEmpty => Right(ast) case free => val firstVar = free.headOption.map(_.name).getOrElse("someVar") - Left(s""" - |Found the following variables: ${free.map(_.name).toList} that seem to originate outside of a `quote {...}` or `run {...}` - |block. In the AST: - |${Format(ast.toString)} - |Quotes and run blocks cannot use values outside their scope directly (with the exception of inline expressions in Scala 3). - |In order to use runtime values in a quotation, you need to lift them, so instead - |of this `$firstVar` do this: `lift($firstVar)`.
- |Here is a more complete example: - |Instead of this: `def byName(n: String) = quote(query[Person].filter(_.name == n))` - | Do this: `def byName(n: String) = quote(query[Person].filter(_.name == lift(n)))` - """.stripMargin) + Left( + s""" + |Found the following variables: ${free + .map(_.name) + .toList} that seem to originate outside of a `quote {...}` or `run {...}` + |block. In the AST: + |${Format(ast.toString)} + |Quotes and run blocks cannot use values outside their scope directly (with the exception of inline expressions in Scala 3). + |In order to use runtime values in a quotation, you need to lift them, so instead + |of this `$firstVar` do this: `lift($firstVar)`. + |Here is a more complete example: + |Instead of this: `def byName(n: String) = quote(query[Person].filter(_.name == n))` + | Do this: `def byName(n: String) = quote(query[Person].filter(_.name == lift(n)))` + """.stripMargin + ) } def apply(ast: Ast)(using Quotes) = { import quotes.reflect._ diff --git a/quill-sql/src/main/scala/io/getquill/context/mirror/ArrayMirrorEncoding.scala b/quill-sql/src/main/scala/io/getquill/context/mirror/ArrayMirrorEncoding.scala index dcca1e7e1..3a68f192e 100644 --- a/quill-sql/src/main/scala/io/getquill/context/mirror/ArrayMirrorEncoding.scala +++ b/quill-sql/src/main/scala/io/getquill/context/mirror/ArrayMirrorEncoding.scala @@ -12,29 +12,32 @@ import scala.collection.Factory trait ArrayMirrorEncoding extends ArrayEncoding { this: SqlMirrorContext[_, _] => - implicit def arrayStringEncoder[Col <: Seq[String]]: Encoder[Col] = encoder[Col] + implicit def arrayStringEncoder[Col <: Seq[String]]: Encoder[Col] = encoder[Col] implicit def arrayBigDecimalEncoder[Col <: Seq[BigDecimal]]: Encoder[Col] = encoder[Col] - implicit def arrayBooleanEncoder[Col <: Seq[Boolean]]: Encoder[Col] = encoder[Col] - implicit def arrayByteEncoder[Col <: Seq[Byte]]: Encoder[Col] = encoder[Col] - implicit def arrayShortEncoder[Col <: Seq[Short]]: Encoder[Col] = encoder[Col] - implicit def arrayIntEncoder[Col <: Seq[Int]]: Encoder[Col] = encoder[Col] - implicit def arrayLongEncoder[Col <: Seq[Long]]: Encoder[Col] = encoder[Col] - implicit def arrayFloatEncoder[Col <: Seq[Float]]: Encoder[Col] = encoder[Col] - implicit def arrayDoubleEncoder[Col <: Seq[Double]]: Encoder[Col] = encoder[Col] - implicit def arrayDateEncoder[Col <: Seq[Date]]: Encoder[Col] = encoder[Col] - implicit def arrayLocalDateEncoder[Col <: Seq[LocalDate]]: Encoder[Col] = encoder[Col] - implicit def arrayUuidEncoder[Col <: Seq[UUID]]: Encoder[Col] = encoder[Col] + implicit def arrayBooleanEncoder[Col <: Seq[Boolean]]: Encoder[Col] = encoder[Col] + implicit def arrayByteEncoder[Col <: Seq[Byte]]: Encoder[Col] = encoder[Col] + implicit def arrayShortEncoder[Col <: Seq[Short]]: Encoder[Col] = encoder[Col] + implicit def arrayIntEncoder[Col <: Seq[Int]]: Encoder[Col] = encoder[Col] + implicit def arrayLongEncoder[Col <: Seq[Long]]: Encoder[Col] = encoder[Col] + implicit def arrayFloatEncoder[Col <: Seq[Float]]: Encoder[Col] = encoder[Col] + implicit def arrayDoubleEncoder[Col <: Seq[Double]]: Encoder[Col] = encoder[Col] + implicit def arrayDateEncoder[Col <: Seq[Date]]: Encoder[Col] = encoder[Col] + implicit def arrayLocalDateEncoder[Col <: Seq[LocalDate]]: Encoder[Col] = encoder[Col] + implicit def arrayUuidEncoder[Col <: Seq[UUID]]: Encoder[Col] = encoder[Col] implicit def arrayStringDecoder[Col <: Seq[String]](implicit bf: CBF[String, Col]): Decoder[Col] = decoderUnsafe[Col] - implicit def arrayBigDecimalDecoder[Col <: 
Seq[BigDecimal]](implicit bf: CBF[BigDecimal, Col]): Decoder[Col] = decoderUnsafe[Col] - implicit def arrayBooleanDecoder[Col <: Seq[Boolean]](implicit bf: CBF[Boolean, Col]): Decoder[Col] = decoderUnsafe[Col] - implicit def arrayByteDecoder[Col <: Seq[Byte]](implicit bf: CBF[Byte, Col]): Decoder[Col] = decoderUnsafe[Col] - implicit def arrayShortDecoder[Col <: Seq[Short]](implicit bf: CBF[Short, Col]): Decoder[Col] = decoderUnsafe[Col] - implicit def arrayIntDecoder[Col <: Seq[Int]](implicit bf: CBF[Int, Col]): Decoder[Col] = decoderUnsafe[Col] - implicit def arrayLongDecoder[Col <: Seq[Long]](implicit bf: CBF[Long, Col]): Decoder[Col] = decoderUnsafe[Col] - implicit def arrayFloatDecoder[Col <: Seq[Float]](implicit bf: CBF[Float, Col]): Decoder[Col] = decoderUnsafe[Col] + implicit def arrayBigDecimalDecoder[Col <: Seq[BigDecimal]](implicit bf: CBF[BigDecimal, Col]): Decoder[Col] = + decoderUnsafe[Col] + implicit def arrayBooleanDecoder[Col <: Seq[Boolean]](implicit bf: CBF[Boolean, Col]): Decoder[Col] = + decoderUnsafe[Col] + implicit def arrayByteDecoder[Col <: Seq[Byte]](implicit bf: CBF[Byte, Col]): Decoder[Col] = decoderUnsafe[Col] + implicit def arrayShortDecoder[Col <: Seq[Short]](implicit bf: CBF[Short, Col]): Decoder[Col] = decoderUnsafe[Col] + implicit def arrayIntDecoder[Col <: Seq[Int]](implicit bf: CBF[Int, Col]): Decoder[Col] = decoderUnsafe[Col] + implicit def arrayLongDecoder[Col <: Seq[Long]](implicit bf: CBF[Long, Col]): Decoder[Col] = decoderUnsafe[Col] + implicit def arrayFloatDecoder[Col <: Seq[Float]](implicit bf: CBF[Float, Col]): Decoder[Col] = decoderUnsafe[Col] implicit def arrayDoubleDecoder[Col <: Seq[Double]](implicit bf: CBF[Double, Col]): Decoder[Col] = decoderUnsafe[Col] - implicit def arrayDateDecoder[Col <: Seq[Date]](implicit bf: CBF[Date, Col]): Decoder[Col] = decoderUnsafe[Col] - implicit def arrayLocalDateDecoder[Col <: Seq[LocalDate]](implicit bf: CBF[LocalDate, Col]): Decoder[Col] = decoderUnsafe[Col] + implicit def arrayDateDecoder[Col <: Seq[Date]](implicit bf: CBF[Date, Col]): Decoder[Col] = decoderUnsafe[Col] + implicit def arrayLocalDateDecoder[Col <: Seq[LocalDate]](implicit bf: CBF[LocalDate, Col]): Decoder[Col] = + decoderUnsafe[Col] implicit def arrayUuidDecoder[Col <: Seq[UUID]](implicit bf: Factory[UUID, Col]): Decoder[Col] = decoderUnsafe[Col] } diff --git a/quill-sql/src/main/scala/io/getquill/context/mirror/MirrorDecoders.scala b/quill-sql/src/main/scala/io/getquill/context/mirror/MirrorDecoders.scala index 16c15529c..8d80809f0 100644 --- a/quill-sql/src/main/scala/io/getquill/context/mirror/MirrorDecoders.scala +++ b/quill-sql/src/main/scala/io/getquill/context/mirror/MirrorDecoders.scala @@ -10,8 +10,8 @@ import io.getquill.MappedEncoding trait MirrorDecoders extends EncodingDsl { override type PrepareRow = Row - override type ResultRow = Row - type Decoder[T] = MirrorDecoder[T] + override type ResultRow = Row + type Decoder[T] = MirrorDecoder[T] case class MirrorDecoder[T](decoder: DecoderMethod[T]) extends BaseDecoder[T] { override def apply(index: Int, row: ResultRow, session: Session) = @@ -19,7 +19,7 @@ trait MirrorDecoders extends EncodingDsl { } def decoder[T: ClassTag]: Decoder[T] = - MirrorDecoder((index: Int, row: ResultRow, session: Session) => { + MirrorDecoder { (index: Int, row: ResultRow, session: Session) => val cls = implicitly[ClassTag[T]].runtimeClass if (cls.isPrimitive && row.nullAt(index)) 0.asInstanceOf[T] @@ -27,9 +27,10 @@ trait MirrorDecoders extends EncodingDsl { null.asInstanceOf[T] else row[T](index) - }) 
+ } - def decoderUnsafe[T]: Decoder[T] = MirrorDecoder((index: Int, row: ResultRow, session: Session) => row.data(index).asInstanceOf[T]) + def decoderUnsafe[T]: Decoder[T] = + MirrorDecoder((index: Int, row: ResultRow, session: Session) => row.data(index).asInstanceOf[T]) implicit def mappedDecoder[I, O](implicit mapped: MappedEncoding[I, O], d: Decoder[I]): Decoder[O] = MirrorDecoder((index: Int, row: ResultRow, session: Session) => mapped.f(d.apply(index, row, session))) @@ -42,17 +43,17 @@ trait MirrorDecoders extends EncodingDsl { Some(d(index, row, session)) ) - implicit val stringDecoder: Decoder[String] = decoder[String] + implicit val stringDecoder: Decoder[String] = decoder[String] implicit val bigDecimalDecoder: Decoder[BigDecimal] = decoder[BigDecimal] - implicit val booleanDecoder: Decoder[Boolean] = decoder[Boolean] - implicit val byteDecoder: Decoder[Byte] = decoder[Byte] - implicit val shortDecoder: Decoder[Short] = decoder[Short] - implicit val intDecoder: Decoder[Int] = decoder[Int] - implicit val longDecoder: Decoder[Long] = decoder[Long] - implicit val floatDecoder: Decoder[Float] = decoder[Float] - implicit val doubleDecoder: Decoder[Double] = decoder[Double] + implicit val booleanDecoder: Decoder[Boolean] = decoder[Boolean] + implicit val byteDecoder: Decoder[Byte] = decoder[Byte] + implicit val shortDecoder: Decoder[Short] = decoder[Short] + implicit val intDecoder: Decoder[Int] = decoder[Int] + implicit val longDecoder: Decoder[Long] = decoder[Long] + implicit val floatDecoder: Decoder[Float] = decoder[Float] + implicit val doubleDecoder: Decoder[Double] = decoder[Double] implicit val byteArrayDecoder: Decoder[Array[Byte]] = decoder[Array[Byte]] - implicit val dateDecoder: Decoder[Date] = decoder[Date] - implicit val localDateDecoder: Decoder[LocalDate] = decoder[LocalDate] - implicit val uuidDecoder: Decoder[UUID] = decoder[UUID] + implicit val dateDecoder: Decoder[Date] = decoder[Date] + implicit val localDateDecoder: Decoder[LocalDate] = decoder[LocalDate] + implicit val uuidDecoder: Decoder[UUID] = decoder[UUID] } diff --git a/quill-sql/src/main/scala/io/getquill/context/mirror/MirrorEncoders.scala b/quill-sql/src/main/scala/io/getquill/context/mirror/MirrorEncoders.scala index 9ebfb9ba5..bd03fb2be 100644 --- a/quill-sql/src/main/scala/io/getquill/context/mirror/MirrorEncoders.scala +++ b/quill-sql/src/main/scala/io/getquill/context/mirror/MirrorEncoders.scala @@ -10,39 +10,40 @@ import io.getquill.MappedEncoding // TODO convert int back to index trait MirrorEncoders extends EncodingDsl { override type PrepareRow = Row - override type ResultRow = Row - type Encoder[T] = MirrorEncoder[T] + override type ResultRow = Row + type Encoder[T] = MirrorEncoder[T] case class MirrorEncoder[T](encoder: EncoderMethod[T]) extends BaseEncoder[T] { override def apply(index: Int, value: T, row: PrepareRow, session: Session) = encoder(index, value, row, session) } - def encoder[T]: MirrorEncoder[T] = MirrorEncoder((index: Int, value: T, row: PrepareRow, session: Session) => row.add(value)) + def encoder[T]: MirrorEncoder[T] = + MirrorEncoder((index: Int, value: T, row: PrepareRow, session: Session) => row.add(value)) implicit def mappedEncoder[I, O](implicit mapped: MappedEncoding[I, O], e: Encoder[O]): Encoder[I] = MirrorEncoder((index: Int, value: I, row: PrepareRow, session: Session) => e(index, mapped.f(value), row, session)) implicit def optionEncoder[T](implicit d: Encoder[T]): Encoder[Option[T]] = - MirrorEncoder((index: Int, value: Option[T], row: PrepareRow, session: 
diff --git a/quill-sql/src/main/scala/io/getquill/context/mirror/MirrorEncoders.scala b/quill-sql/src/main/scala/io/getquill/context/mirror/MirrorEncoders.scala
index 9ebfb9ba5..bd03fb2be 100644
--- a/quill-sql/src/main/scala/io/getquill/context/mirror/MirrorEncoders.scala
+++ b/quill-sql/src/main/scala/io/getquill/context/mirror/MirrorEncoders.scala
@@ -10,39 +10,40 @@ import io.getquill.MappedEncoding
 
 // TODO convert int back to index
 trait MirrorEncoders extends EncodingDsl {
   override type PrepareRow = Row
-  override type ResultRow = Row
-  type Encoder[T] = MirrorEncoder[T]
+  override type ResultRow  = Row
+  type Encoder[T]          = MirrorEncoder[T]
 
   case class MirrorEncoder[T](encoder: EncoderMethod[T]) extends BaseEncoder[T] {
     override def apply(index: Int, value: T, row: PrepareRow, session: Session) =
       encoder(index, value, row, session)
   }
 
-  def encoder[T]: MirrorEncoder[T] = MirrorEncoder((index: Int, value: T, row: PrepareRow, session: Session) => row.add(value))
+  def encoder[T]: MirrorEncoder[T] =
+    MirrorEncoder((index: Int, value: T, row: PrepareRow, session: Session) => row.add(value))
 
   implicit def mappedEncoder[I, O](implicit mapped: MappedEncoding[I, O], e: Encoder[O]): Encoder[I] =
     MirrorEncoder((index: Int, value: I, row: PrepareRow, session: Session) => e(index, mapped.f(value), row, session))
 
   implicit def optionEncoder[T](implicit d: Encoder[T]): Encoder[Option[T]] =
-    MirrorEncoder((index: Int, value: Option[T], row: PrepareRow, session: Session) => {
+    MirrorEncoder { (index: Int, value: Option[T], row: PrepareRow, session: Session) =>
      value match {
        case None    => row.add(None)
        case Some(v) => row.add(d(index, v, Row(), session).data.headOption)
      }
-    })
+    }
 
-  implicit val stringEncoder: Encoder[String] = encoder[String]
+  implicit val stringEncoder: Encoder[String]          = encoder[String]
   implicit val bigDecimalEncoder: Encoder[BigDecimal] = encoder[BigDecimal]
-  implicit val booleanEncoder: Encoder[Boolean] = encoder[Boolean]
-  implicit val byteEncoder: Encoder[Byte] = encoder[Byte]
-  implicit val shortEncoder: Encoder[Short] = encoder[Short]
-  implicit val intEncoder: Encoder[Int] = encoder[Int]
-  implicit val longEncoder: Encoder[Long] = encoder[Long]
-  implicit val floatEncoder: Encoder[Float] = encoder[Float]
-  implicit val doubleEncoder: Encoder[Double] = encoder[Double]
+  implicit val booleanEncoder: Encoder[Boolean]        = encoder[Boolean]
+  implicit val byteEncoder: Encoder[Byte]              = encoder[Byte]
+  implicit val shortEncoder: Encoder[Short]            = encoder[Short]
+  implicit val intEncoder: Encoder[Int]                = encoder[Int]
+  implicit val longEncoder: Encoder[Long]              = encoder[Long]
+  implicit val floatEncoder: Encoder[Float]            = encoder[Float]
+  implicit val doubleEncoder: Encoder[Double]          = encoder[Double]
   implicit val byteArrayEncoder: Encoder[Array[Byte]] = encoder[Array[Byte]]
-  implicit val dateEncoder: Encoder[Date] = encoder[Date]
-  implicit val localDateEncoder: Encoder[LocalDate] = encoder[LocalDate]
-  implicit val uuidEncoder: Encoder[UUID] = encoder[UUID]
-  implicit def nullEncoder: Encoder[Null] = encoder[Null]
+  implicit val dateEncoder: Encoder[Date]              = encoder[Date]
+  implicit val localDateEncoder: Encoder[LocalDate]   = encoder[LocalDate]
+  implicit val uuidEncoder: Encoder[UUID]              = encoder[UUID]
+  implicit def nullEncoder: Encoder[Null]              = encoder[Null]
 }
diff --git a/quill-sql/src/main/scala/io/getquill/context/mirror/Row.scala b/quill-sql/src/main/scala/io/getquill/context/mirror/Row.scala
index c2d92fc16..311e91fd2 100644
--- a/quill-sql/src/main/scala/io/getquill/context/mirror/Row.scala
+++ b/quill-sql/src/main/scala/io/getquill/context/mirror/Row.scala
@@ -8,7 +8,7 @@ object Row {
   case class Data(key: String, value: Any)
   @targetName("columns")
   def apply(values: (String, Any)*) = new Row(values.map((k, v) => Data(k, v)).toList)
-  def apply(values: Any*) = new Row(values.zipWithIndex.map((v, i) => Data(s"_${i + 1}", v)).toList)
+  def apply(values: Any*)           = new Row(values.zipWithIndex.map((v, i) => Data(s"_${i + 1}", v)).toList)
   object TupleIndex {
     def unapply(str: Any): Option[Int] =
       if (str.toString.matches("_([0-9]+)"))
@@ -51,8 +51,8 @@ case class Row(elements: List[Row.Data]) {
     }
   }
   private def nextNumberedRow = maxNumberedRow + 1
-  private def nextTupleIndex = s"_${nextNumberedRow}"
+  private def nextTupleIndex  = s"_${nextNumberedRow}"
 
-  def add(value: Any) = Row(elements :+ Row.Data(nextTupleIndex, value))
+  def add(value: Any)              = Row(elements :+ Row.Data(nextTupleIndex, value))
   def add(key: String, value: Any) = Row(elements :+ Row.Data(key, value))
 }
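The `Row` changes above are whitespace-only. For reference, keyless `add` calls are stored under synthetic tuple-style keys (`_1`, `_2`, ...), which is what `MirrorEncoders.optionEncoder` relies on when it reads back `.data.headOption`. A small usage sketch (assuming the `Row` API shown in this hunk and its `data` accessor used by the encoders above):

    import io.getquill.context.mirror.Row

    // Keyless adds get keys _1, _2, ...; keyed adds keep their explicit key.
    val row = Row("joe", 123).add(true).add("isActive", false)
    // row.data presumably yields List("joe", 123, true, false)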
diff --git a/quill-sql/src/main/scala/io/getquill/context/sql/SqlContext.scala b/quill-sql/src/main/scala/io/getquill/context/sql/SqlContext.scala
index 1a2fb3a2c..dcae1b8b9 100644
--- a/quill-sql/src/main/scala/io/getquill/context/sql/SqlContext.scala
+++ b/quill-sql/src/main/scala/io/getquill/context/sql/SqlContext.scala
@@ -9,9 +9,7 @@ import io.getquill.context.Context
 import io.getquill.NamingStrategy
 import io.getquill.generic.EncodingDsl
 
-trait SqlContext[+Idiom <: BaseIdiom, +Naming <: NamingStrategy]
-  extends Context[Idiom, Naming]
-  with EncodingDsl {
+trait SqlContext[+Idiom <: BaseIdiom, +Naming <: NamingStrategy] extends Context[Idiom, Naming] with EncodingDsl {
 
   implicit def optionDecoder[T](implicit d: Decoder[T]): Decoder[Option[T]]
   implicit def optionEncoder[T](implicit d: Encoder[T]): Encoder[Option[T]]
diff --git a/quill-sql/src/main/scala/io/getquill/generic/AnyValEncodingMacro.scala b/quill-sql/src/main/scala/io/getquill/generic/AnyValEncodingMacro.scala
index db8682079..7cbe627e5 100644
--- a/quill-sql/src/main/scala/io/getquill/generic/AnyValEncodingMacro.scala
+++ b/quill-sql/src/main/scala/io/getquill/generic/AnyValEncodingMacro.scala
@@ -15,8 +15,12 @@ trait AnyValDecoderContext[Decoder[_], Mapped] {
 }
 
 object MappedDecoderMaker {
-  inline def apply[Decoder[_], Mapped <: AnyVal]: AnyValDecoderContext[Decoder, Mapped] => Decoder[Mapped] = ${ applyImpl[Decoder, Mapped] }
-  def applyImpl[Decoder[_]: Type, Mapped <: AnyVal: Type](using qctx: Quotes): Expr[AnyValDecoderContext[Decoder, Mapped] => Decoder[Mapped]] = {
+  inline def apply[Decoder[_], Mapped <: AnyVal]: AnyValDecoderContext[Decoder, Mapped] => Decoder[Mapped] = ${
+    applyImpl[Decoder, Mapped]
+  }
+  def applyImpl[Decoder[_]: Type, Mapped <: AnyVal: Type](using
+      qctx: Quotes
+  ): Expr[AnyValDecoderContext[Decoder, Mapped] => Decoder[Mapped]] = {
     import qctx.reflect._
     // try to summon a normal encoder first and see if that works
     def isAnyValDecoder(term: Term) =
@@ -30,12 +34,12 @@ object MappedDecoderMaker {
     //     decoder
     //   case _ =>
     // get the type from the primary constructor and try to summon an encoder for that
-    val tpe = TypeRepr.of[Mapped]
+    val tpe         = TypeRepr.of[Mapped]
     val constructor = tpe.typeSymbol.primaryConstructor
     // TODO Better error describing why the encoder could not be syntheisized if the constructor doesn't exist or has wrong form (i.e. != 1 arg)
-    val firstParam = tpe.typeSymbol.primaryConstructor.paramSymss(0)(0)
+    val firstParam      = tpe.typeSymbol.primaryConstructor.paramSymss(0)(0)
     val firstParamField = tpe.typeSymbol.memberField(firstParam.name)
-    val firstParamType = tpe.memberType(firstParamField)
+    val firstParamType  = tpe.memberType(firstParamField)
     // println(s"========== First Param Type ${Format.TypeRepr(firstParamType)} of: ${Format.TypeRepr(tpe)} =========")
 
     //
@@ -55,12 +59,15 @@ object MappedDecoderMaker {
             }
           )
         }
-        val out = '{ (ctx: AnyValDecoderContext[Decoder, Mapped]) => ctx.makeMappedDecoder[tt]($mappedDecoding, $enc) }
+        val out = '{ (ctx: AnyValDecoderContext[Decoder, Mapped]) =>
+          ctx.makeMappedDecoder[tt]($mappedDecoding, $enc)
+        }
         // println(s"========== RETURNING Encoder ${Format.TypeRepr(tpe)} => ${Format.TypeRepr(firstParamType)} Consisting of: ${Format.Expr(out)} =========")
         out
       case None =>
         report.throwError(
-          s"Cannot find a regular encoder for the AnyVal type ${Format.TypeRepr(tpe)} or a mapped-encoder for it's base type: ${Format.TypeRepr(firstParamType)}"
+          s"Cannot find a regular encoder for the AnyVal type ${Format
+            .TypeRepr(tpe)} or a mapped-encoder for its base type: ${Format.TypeRepr(firstParamType)}"
        )
     }
   }
@@ -68,8 +75,12 @@ object MappedDecoderMaker {
 }
 
 object MappedEncoderMaker {
-  inline def apply[Encoder[_], Mapped <: AnyVal]: AnyValEncoderContext[Encoder, Mapped] => Encoder[Mapped] = ${ applyImpl[Encoder, Mapped] }
-  def applyImpl[Encoder[_]: Type, Mapped <: AnyVal: Type](using qctx: Quotes): Expr[AnyValEncoderContext[Encoder, Mapped] => Encoder[Mapped]] = {
+  inline def apply[Encoder[_], Mapped <: AnyVal]: AnyValEncoderContext[Encoder, Mapped] => Encoder[Mapped] = ${
+    applyImpl[Encoder, Mapped]
+  }
+  def applyImpl[Encoder[_]: Type, Mapped <: AnyVal: Type](using
+      qctx: Quotes
+  ): Expr[AnyValEncoderContext[Encoder, Mapped] => Encoder[Mapped]] = {
     import qctx.reflect._
 
     def isAnyValEncoder(term: Term) =
@@ -96,20 +107,25 @@ object MappedEncoderMaker {
         report.throwError(s"not matched: ${Format.TypeRepr(tpe.dealias)}")
       }
     val firstParamField = tpe.typeSymbol.memberField(firstParam.name)
-    val firstParamType = tpe.memberType(firstParamField)
+    val firstParamType  = tpe.memberType(firstParamField)
     // Try to summon an encoder from the first param type
     firstParamType.asType match {
       case '[tt] =>
         Expr.summon[Encoder[tt]] match {
           case Some(enc) =>
-            val mappedEncoding = '{ MappedEncoding((v: Mapped) => ${ Select('v.asTerm, firstParamField).asExprOf[tt] }) }
-            val out = '{ (ctx: AnyValEncoderContext[Encoder, Mapped]) => ctx.makeMappedEncoder[tt]($mappedEncoding, $enc) }
+            val mappedEncoding = '{
+              MappedEncoding((v: Mapped) => ${ Select('v.asTerm, firstParamField).asExprOf[tt] })
+            }
+            val out = '{ (ctx: AnyValEncoderContext[Encoder, Mapped]) =>
+              ctx.makeMappedEncoder[tt]($mappedEncoding, $enc)
+            }
             // println(s"========== RETURNING Encoder ${Format.TypeRepr(tpe)} => ${Format.TypeRepr(firstParamType)} Consisting of: ${Format.Expr(out)} =========")
             out
           case None =>
             report.throwError(
-              s"Cannot find a regular encoder for the AnyVal type ${Format.TypeRepr(tpe)} or a mapped-encoder for it's base type: ${Format.TypeRepr(firstParamType)}"
+              s"Cannot find a regular encoder for the AnyVal type ${Format
+                .TypeRepr(tpe)} or a mapped-encoder for its base type: ${Format.TypeRepr(firstParamType)}"
            )
         }
     }
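Both makers above reduce an `AnyVal` wrapper to a `MappedEncoding` over its single constructor field and delegate to the context's mapped encoder/decoder. What the generated code amounts to can be written by hand with the public API (a sketch; `UserId` is a hypothetical wrapper):

    import io.getquill.MappedEncoding

    final case class UserId(value: Int) extends AnyVal

    // Equivalent of what MappedEncoderMaker derives: UserId -> Int
    val userIdEncode: MappedEncoding[UserId, Int] = MappedEncoding(_.value)
    // Equivalent of what MappedDecoderMaker derives: Int -> UserId
    val userIdDecode: MappedEncoding[Int, UserId] = MappedEncoding(UserId(_))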
@@ -122,10 +138,10 @@ object AnyValToValMacro {
   inline def apply[Cls <: AnyVal, V]: MappedEncoding[Cls, V] = ${ applyImpl[Cls, V] }
   def applyImpl[Cls <: AnyVal: Type, V: Type](using qctx: Quotes): Expr[MappedEncoding[Cls, V]] = {
     import qctx.reflect._
-    val tpe = TypeRepr.of[Cls]
-    val firstParam = tpe.typeSymbol.primaryConstructor.paramSymss(0)(0)
+    val tpe             = TypeRepr.of[Cls]
+    val firstParam      = tpe.typeSymbol.primaryConstructor.paramSymss(0)(0)
     val firstParamField = tpe.typeSymbol.memberField(firstParam.name)
-    val firstParamType = tpe.memberType(firstParamField)
+    val firstParamType  = tpe.memberType(firstParamField)
     // println("Member type of 1st param: " + io.getquill.util.Format.TypeRepr(firstParamType))
     '{ MappedEncoding((v: Cls) => ${ Select('v.asTerm, firstParamField).asExprOf[V] }) }
   }
@@ -137,7 +153,7 @@ object ValToAnyValMacro {
   inline def apply[V, Cls <: AnyVal]: MappedEncoding[V, Cls] = ${ applyImpl[V, Cls] }
   def applyImpl[V: Type, Cls <: AnyVal: Type](using qctx: Quotes): Expr[MappedEncoding[V, Cls]] = {
     import qctx.reflect._
-    val tpe = TypeRepr.of[Cls]
+    val tpe         = TypeRepr.of[Cls]
     val constructor = tpe.typeSymbol.primaryConstructor
     '{
       MappedEncoding((v: V) =>
diff --git a/quill-sql/src/main/scala/io/getquill/generic/ArrayEncoding.scala b/quill-sql/src/main/scala/io/getquill/generic/ArrayEncoding.scala
index 8946d5cbc..ad8beda20 100644
--- a/quill-sql/src/main/scala/io/getquill/generic/ArrayEncoding.scala
+++ b/quill-sql/src/main/scala/io/getquill/generic/ArrayEncoding.scala
@@ -43,25 +43,19 @@ trait ArrayEncoding extends EncodingDsl {
   implicit def arrayLocalDateDecoder[Col <: Seq[LocalDate]](implicit bf: CBF[LocalDate, Col]): Decoder[Col]
   implicit def arrayUuidDecoder[Col <: Seq[UUID]](implicit bf: Factory[UUID, Col]): Decoder[Col]
 
-  implicit def arrayMappedEncoder[I, O, Col[X] <: Seq[X]](
-    implicit
-    mapped: MappedEncoding[I, O],
-    e: Encoder[Seq[O]]
-  ): Encoder[Col[I]] = {
+  implicit def arrayMappedEncoder[I, O, Col[X] <: Seq[X]](implicit
+      mapped: MappedEncoding[I, O],
+      e: Encoder[Seq[O]]
+  ): Encoder[Col[I]] =
     mappedEncoder[Col[I], Seq[O]](MappedEncoding((col: Col[I]) => col.map(mapped.f)), e)
-  }
 
-  implicit def arrayMappedDecoder[I, O, Col[X] <: Seq[X]](
-    implicit
-    mapped: MappedEncoding[I, O],
-    d: Decoder[Seq[I]],
-    bf: Factory[O, Col[O]]
-  ): Decoder[Col[O]] = {
+  implicit def arrayMappedDecoder[I, O, Col[X] <: Seq[X]](implicit
+      mapped: MappedEncoding[I, O],
+      d: Decoder[Seq[I]],
+      bf: Factory[O, Col[O]]
+  ): Decoder[Col[O]] =
     mappedDecoder[Seq[I], Col[O]](
-      MappedEncoding((col: Seq[I]) =>
-        col.foldLeft(bf.newBuilder)((b, x) => b += mapped.f(x)).result
-      ),
+      MappedEncoding((col: Seq[I]) => col.foldLeft(bf.newBuilder)((b, x) => b += mapped.f(x)).result),
       d
     )
-  }
 }
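`arrayMappedEncoder` and `arrayMappedDecoder` above lift an element-level `MappedEncoding` over a whole collection; the decoder rebuilds the target collection through the summoned `Factory`. The same fold at the value level (a sketch, not the macro-generated code):

    import scala.collection.Factory

    // Lift f: I => O over any Seq, rebuilding the requested collection type,
    // exactly as the foldLeft in arrayMappedDecoder does.
    def mapInto[I, O, C](col: Seq[I])(f: I => O)(implicit bf: Factory[O, C]): C =
      col.foldLeft(bf.newBuilder)((b, x) => b += f(x)).result()

    // mapInto[Int, String, Vector[String]](Seq(1, 2))(_.toString) == Vector("1", "2")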
diff --git a/quill-sql/src/main/scala/io/getquill/generic/ConstructType.scala b/quill-sql/src/main/scala/io/getquill/generic/ConstructType.scala
index 8fa5f2473..04aabbc71 100644
--- a/quill-sql/src/main/scala/io/getquill/generic/ConstructType.scala
+++ b/quill-sql/src/main/scala/io/getquill/generic/ConstructType.scala
@@ -12,7 +12,7 @@ object ConstructType {
     val types = children.map(_._1)
     val terms = children.map(_._2)
     // Get the constructor
-    val tpe = TypeRepr.of[T]
+    val tpe         = TypeRepr.of[T]
     val constructor = TypeRepr.of[T].typeSymbol.primaryConstructor
 
     // println(s"tpe: ${tpe}, constructor: ${constructor}")
@@ -34,7 +34,9 @@ object ConstructType {
       // println(s"=========== Create from Tuple Constructor ${Format.Expr(construct.asExprOf[T])} ===========")
       construct.asExprOf[T]
       // If we are a case class with no generic parameters, we can easily construct it
-    } else if (tpe.classSymbol.exists(_.flags.is(Flags.Case)) && !constructor.paramSymss.exists(_.exists(_.isTypeParam))) {
+    } else if (
+      tpe.classSymbol.exists(_.flags.is(Flags.Case)) && !constructor.paramSymss.exists(_.exists(_.isTypeParam))
+    ) {
       val construct =
         Apply(
           Select(New(TypeTree.of[T]), constructor),
@@ -48,4 +50,4 @@ object ConstructType {
       '{ $m.fromProduct(${ Expr.ofTupleFromSeq(terms) }) }.asExprOf[T]
     }
   } // end apply
-} // end ConstructType
+} // end ConstructType
diff --git a/quill-sql/src/main/scala/io/getquill/generic/DeconstructElaboratedEntityLevels.scala b/quill-sql/src/main/scala/io/getquill/generic/DeconstructElaboratedEntityLevels.scala
index 9e076efe5..469def084 100644
--- a/quill-sql/src/main/scala/io/getquill/generic/DeconstructElaboratedEntityLevels.scala
+++ b/quill-sql/src/main/scala/io/getquill/generic/DeconstructElaboratedEntityLevels.scala
@@ -43,27 +43,30 @@ private[getquill] class DeconstructElaboratedEntityLevels(using val qctx: Quotes
   }
 
   val transpileConfig = SummonTranspileConfig()
-  val interp = new Interpolator2(TraceType.Elaboration, transpileConfig.traceConfig, 1)
+  val interp          = new Interpolator2(TraceType.Elaboration, transpileConfig.traceConfig, 1)
   import interp._
 
   sealed trait ElaboratedField
   object ElaboratedField {
     private def create(tpe: TypeRepr, fieldName: String) = {
       val typeSymbol = tpe.typeSymbol
-      typeSymbol.methodMembers.find(m => m.name == fieldName && m.paramSymss == List()).map(ZeroArgsMethod(_))
+      typeSymbol.methodMembers
+        .find(m => m.name == fieldName && m.paramSymss == List())
+        .map(ZeroArgsMethod(_))
         .orElse(typeSymbol.fieldMembers.find(m => m.name == fieldName).map(Field(_)))
         .getOrElse(NotFound)
     }
 
     case class ZeroArgsMethod(symbol: Symbol) extends ElaboratedField
-    case class Field(symbol: Symbol) extends ElaboratedField
-    case object NotFound extends ElaboratedField
+    case class Field(symbol: Symbol)          extends ElaboratedField
+    case object NotFound                      extends ElaboratedField
 
     def resolve(tpe: TypeRepr, fieldName: String, term: Term) =
       ElaboratedField.create(tpe, fieldName) match {
         case ZeroArgsMethod(sym) => (sym, tpe.widen.memberType(sym).widen)
         case Field(sym)          => (sym, tpe.widen.memberType(sym).widen)
-        case NotFound => report.throwError(s"Cannot find the field (or zero-args method) $fieldName in the ${tpe.show} term: $term")
+        case NotFound =>
+          report.throwError(s"Cannot find the field (or zero-args method) $fieldName in the ${tpe.show} term: $term")
       }
   } // end ElaboratedField
 
@@ -71,7 +74,8 @@ private[getquill] class DeconstructElaboratedEntityLevels(using val qctx: Quotes
   def apply[ProductCls: Type](elaboration: Term): List[(Term, Expr[ProductCls] => Expr[_], Type[_])] = {
     // Don't know if a case where the top-level elaborate thing can be an optional but still want to add the check
     val topLevelOptional = isOption[ProductCls]
-    recurseNest[ProductCls](elaboration, topLevelOptional).asInstanceOf[List[(Term, Expr[ProductCls] => Expr[_], Type[_])]]
+    recurseNest[ProductCls](elaboration, topLevelOptional)
+      .asInstanceOf[List[(Term, Expr[ProductCls] => Expr[_], Type[_])]]
   }
 
   // TODO Do we need to include flattenOptions?
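`recurseNest` below walks the elaboration and yields one getter per leaf column, composing parent and child accessors as it descends. Schematically, for a nested case class the result is one path function per column (plain lambdas standing in for the `Expr`-level getters the macro actually builds; names are illustrative):

    final case class Name(first: String, last: String)
    final case class Person(name: Name, age: Int)

    // One (columnName, getter) pair per leaf, as recurseNest produces:
    val leafGetters: List[(String, Person => Any)] = List(
      "namefirst" -> ((p: Person) => p.name.first),
      "namelast"  -> ((p: Person) => p.name.last),
      "age"       -> ((p: Person) => p.age)
    )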
@@ -89,7 +93,7 @@ private[getquill] class DeconstructElaboratedEntityLevels(using val qctx: Quotes
       trace"Computed Elaborations: ${elaborations.map(_._3).map(io.getquill.util.Format.TypeRepr(_))}".andLog()
 
       elaborations.flatMap { (fieldTerm, fieldGetter, fieldTypeRepr) =>
-        val fieldType = fieldTypeRepr.widen.asType
+        val fieldType      = fieldTypeRepr.widen.asType
         val nextIsOptional = optionalAncestor || isOption[Cls]
         fieldType match {
           case '[ft] =>
@@ -108,7 +112,9 @@ private[getquill] class DeconstructElaboratedEntityLevels(using val qctx: Quotes
             // Note that getting the types to line up here is very tricky.
             val output =
               recurseNest[ft](fieldTerm, nextIsOptional).map { (childTerm, childField, childType) =>
-                trace"Computing child term ${Format.TypeOf[Cls]}.${fieldTerm.name}:${Format.Type(fieldType)} -> ${childTerm.name}:${Format.Type(childType)}".andLog()
+                trace"Computing child term ${Format
+                  .TypeOf[Cls]}.${fieldTerm.name}:${Format.Type(fieldType)} -> ${childTerm.name}:${Format.Type(childType)}"
+                  .andLog()
 
                 val pathToField =
                   childType match {
@@ -130,7 +136,9 @@ private[getquill] class DeconstructElaboratedEntityLevels(using val qctx: Quotes
                 (childTerm, pathToField, computedChildType)
               }
-            trace"Nested Getters: ${output.map((term, getter, tpe) => (term.name, Format.Expr('{ (outerClass: Cls) => ${ getter('outerClass) } })))}".andLog()
+            trace"Nested Getters: ${output.map((term, getter, tpe) =>
+              (term.name, Format.Expr('{ (outerClass: Cls) => ${ getter('outerClass) } }))
+            )}".andLog()
             output.asInstanceOf[List[(Term, Expr[Any] => Expr[_], Type[_])]]
         }
       }
@@ -138,8 +146,8 @@ private[getquill] class DeconstructElaboratedEntityLevels(using val qctx: Quotes
   }
 
   def resovePathToField[Cls: Type, ChildType: Type, FieldType: Type](
-    fieldGetter: Expr[Cls] => Expr[?],
-    childField: Expr[?] => Expr[?]
+      fieldGetter: Expr[Cls] => Expr[?],
+      childField: Expr[?] => Expr[?]
   ): Expr[Cls] => Expr[?] = {
     val pathToField =
       Type.of[Cls] match {
@@ -168,20 +176,24 @@ private[getquill] class DeconstructElaboratedEntityLevels(using val qctx: Quotes
             Type.of[ChildType] match {
               case '[Option[nt]] =>
                 val castFieldGetter = fieldGetter.asInstanceOf[Expr[Any] => Expr[Option[FieldType]]]
-                val castNextField = childField.asInstanceOf[Expr[FieldType] => Expr[Option[nt]]]
-                trace"Trying Cls as Option[${Format.TypeOf[cls]}].flatMap, childType as Option[${Format.TypeOf[nt]}]".andLog()
+                val castNextField   = childField.asInstanceOf[Expr[FieldType] => Expr[Option[nt]]]
+                trace"Trying Cls as Option[${Format.TypeOf[cls]}].flatMap, childType as Option[${Format.TypeOf[nt]}]"
+                  .andLog()
                 (outerClass: Expr[Cls]) =>
                   '{ ${ castFieldGetter(outerClass) }.flatMap[nt](flattenClsVal => ${ castNextField('flattenClsVal) }) }
               case '[nt] =>
                 val castFieldGetter = fieldGetter.asInstanceOf[Expr[Any] => Expr[Option[FieldType]]]
-                val castNextField = childField.asInstanceOf[Expr[FieldType] => Expr[nt]]
+                val castNextField   = childField.asInstanceOf[Expr[FieldType] => Expr[nt]]
                 trace"Trying Cls as Option[${Format.TypeOf[cls]}].map, childType as Option[${Format.TypeOf[nt]}]".andLog()
                 (outerClass: Expr[Cls]) =>
                   '{ ${ castFieldGetter(outerClass) }.map[nt](clsVal => ${ castNextField('clsVal) }) }
             }
           case _ =>
             // e.g. nest Person => Person.name into Name => Name.first to get Person => Person.name.first
-            val castFieldGetter = fieldGetter.asInstanceOf[Expr[Any] => Expr[_]] // e.g. Person => Person.name (where name is a case class Name(first: String, last: String))
+            val castFieldGetter =
+              fieldGetter.asInstanceOf[Expr[Any] => Expr[
+                _
+              ]] // e.g. Person => Person.name (where name is a case class Name(first: String, last: String))
             val castNextField = childField.asInstanceOf[Expr[Any] => Expr[_]] // e.g. Name => Name.first
             (outerClass: Expr[Cls]) => castNextField(castFieldGetter(outerClass))
         }
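The `Option[nt]` and `nt` cases above decide between `flatMap` and `map` when threading a getter through an optional parent. Over plain values the same composition looks like this (hypothetical `Client`/`Address` shapes):

    final case class Address(street: Option[String], zip: Int)
    final case class Client(address: Option[Address])

    val addressOf: Client => Option[Address] = _.address
    // optional child under an optional parent => flatMap (the '[Option[nt]] case)
    val streetOf: Client => Option[String] = c => addressOf(c).flatMap(_.street)
    // plain child under an optional parent => map (the '[nt] case)
    val zipOf: Client => Option[Int] = c => addressOf(c).map(_.zip)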
@@ -208,7 +220,7 @@ private[getquill] class DeconstructElaboratedEntityLevels(using val qctx: Quotes
   }
 
   private[getquill] def elaborateObjectOneLevel[Cls: Type](node: Term): List[(Term, Expr[Cls] => Expr[_], TypeRepr)] = {
-    val clsType = TypeRepr.of[Cls]
+    val clsType        = TypeRepr.of[Cls]
     val typeIsOptional = TypeRepr.of[Cls] <:< TypeRepr.of[Option[Any]]
     trace"Elaborating one level. ${node.name} of ${Format.TypeOf[Cls]}"
     node match {
@@ -227,8 +239,10 @@ private[getquill] class DeconstructElaboratedEntityLevels(using val qctx: Quotes
         // exclude it if it does not
         trace"(Node Non-Option) Mapping props of: ${Format.TypeOf[Cls]} is ${childProps.map(_.name)}".andLog()
         childProps.map { childTerm =>
-          val (memberSymbol, memberType) = ElaboratedField.resolve(clsType, childTerm.name, childTerm) // for Person, Person.name.type
-          trace"(Node Non-Option) MemField of: ${childTerm.name} is `${memberSymbol}:${Printer.TypeReprShortCode.show(memberType)}`".andLog()
+          val (memberSymbol, memberType) =
+            ElaboratedField.resolve(clsType, childTerm.name, childTerm) // for Person, Person.name.type
+          trace"(Node Non-Option) MemField of: ${childTerm.name} is `${memberSymbol}:${Printer.TypeReprShortCode.show(memberType)}`"
+            .andLog()
           memberType.asType match {
             case '[t] =>
               val expr = (field: Expr[Cls]) => (field `.(caseField)` (childTerm.name)).asExprOf[t]
@@ -249,47 +263,60 @@ private[getquill] class DeconstructElaboratedEntityLevels(using val qctx: Quotes
         // TODO For coproducts need to check that the childName method actually exists on the type and
         // exclude it if it does not
         trace"(Node Option) Mapping props of: ${Format.TypeOf[Cls]} is ${childProps.map(_.name)}".andLog()
-        childProps.map {
-          childTerm =>
-
-            // In order to be able to flatten optionals in the flattenOptionals later, we need to make
-            // sure that the method-type in the .map function below is 100% correct. That means we
-            // need to lookup what the type of the field of this particular member should actually be.
-            val rootType = `Option[...[t]...]`.innerT(Type.of[Cls])
-            val rootTypeRepr =
-              rootType match {
-                case '[t] => TypeRepr.of[t]
-              }
-            val (memField, memeType) = ElaboratedField.resolve(rootTypeRepr, childTerm.name, childTerm)
-            trace"(Node Option) MemField of: ${childTerm.name} is ${memField}: ${Printer.TypeReprShortCode.show(memeType)}".andLog()
-            (Type.of[Cls], rootType) match {
-              case ('[cls], '[root]) =>
-                memeType.asType match {
-                  // If the nested field is itself optional, need to account for immediate flattening
-                  case '[Option[mt]] =>
-                    val expr = (optField: Expr[cls]) => '{ ${ flattenOptions(optField).asExprOf[Option[root]] }.flatMap[mt](optionProp => ${ ('optionProp `.(caseField)` (childTerm.name)).asExprOf[Option[mt]] }) }
-                    lazy val traceInput = '{ (optField: cls) => ${ expr('optField) } }
-                    trace"(Node Option) '[Option[mt]] Mapping: asExprOf: ${childTerm.name} into ${Format.TypeOf[Option[mt]]} in ${Format.Expr(traceInput)}".andLog()
-                    (
-                      childTerm,
-                      expr.asInstanceOf[Expr[Cls] => Expr[_]],
-                      memeType
-                    )
-                  case '[mt] =>
-                    val expr = (optField: Expr[cls]) => '{ ${ flattenOptions(optField).asExprOf[Option[root]] }.map[mt](regProp => ${ ('regProp `.(caseField)` (childTerm.name)).asExprOf[mt] }) }
-                    lazy val traceInput = '{ (optField: cls) => ${ expr('optField) } }
-                    trace"(Node Option) ['mt] Mapping: asExprOf: ${childTerm.name} into ${Format.TypeOf[mt]} in ${Format.Expr(traceInput)}".andLog()
-                    (
-                      childTerm,
-                      expr.asInstanceOf[Expr[Cls] => Expr[_]],
-                      memeType
-                    )
-                }
+        childProps.map { childTerm =>
+          // In order to be able to flatten optionals in the flattenOptionals later, we need to make
+          // sure that the method-type in the .map function below is 100% correct. That means we
+          // need to lookup what the type of the field of this particular member should actually be.
+          val rootType = `Option[...[t]...]`.innerT(Type.of[Cls])
+          val rootTypeRepr =
+            rootType match {
+              case '[t] => TypeRepr.of[t]
+            }
+          val (memField, memeType) = ElaboratedField.resolve(rootTypeRepr, childTerm.name, childTerm)
+          trace"(Node Option) MemField of: ${childTerm.name} is ${memField}: ${Printer.TypeReprShortCode.show(memeType)}"
+            .andLog()
+          (Type.of[Cls], rootType) match {
+            case ('[cls], '[root]) =>
+              memeType.asType match {
+                // If the nested field is itself optional, need to account for immediate flattening
+                case '[Option[mt]] =>
+                  val expr = (optField: Expr[cls]) =>
+                    '{
+                      ${ flattenOptions(optField).asExprOf[Option[root]] }.flatMap[mt](optionProp =>
+                        ${ ('optionProp `.(caseField)` (childTerm.name)).asExprOf[Option[mt]] }
+                      )
+                    }
+                  lazy val traceInput = '{ (optField: cls) => ${ expr('optField) } }
+                  trace"(Node Option) '[Option[mt]] Mapping: asExprOf: ${childTerm.name} into ${Format
+                    .TypeOf[Option[mt]]} in ${Format.Expr(traceInput)}".andLog()
+                  (
+                    childTerm,
+                    expr.asInstanceOf[Expr[Cls] => Expr[_]],
+                    memeType
+                  )
+                case '[mt] =>
+                  val expr = (optField: Expr[cls]) =>
+                    '{
+                      ${ flattenOptions(optField).asExprOf[Option[root]] }.map[mt](regProp =>
+                        ${ ('regProp `.(caseField)` (childTerm.name)).asExprOf[mt] }
+                      )
+                    }
+                  lazy val traceInput = '{ (optField: cls) => ${ expr('optField) } }
+                  trace"(Node Option) ['mt] Mapping: asExprOf: ${childTerm.name} into ${Format.TypeOf[mt]} in ${Format.Expr(traceInput)}"
+                    .andLog()
+                  (
+                    childTerm,
+                    expr.asInstanceOf[Expr[Cls] => Expr[_]],
+                    memeType
+                  )
+              }
+        }
       case _ =>
-        report.throwError(s"Illegal state during reducing expression term: '${node}' and type: '${io.getquill.util.Format.TypeRepr(clsType)}'")
+        report.throwError(
+          s"Illegal state during reducing expression term: '${node}' and type: '${io.getquill.util.Format.TypeRepr(clsType)}'"
+        )
     } // end match
   }
diff --git a/quill-sql/src/main/scala/io/getquill/generic/ElaborateStructure.scala b/quill-sql/src/main/scala/io/getquill/generic/ElaborateStructure.scala
index e8d5aa716..76cd86630 100644
--- a/quill-sql/src/main/scala/io/getquill/generic/ElaborateStructure.scala
+++ b/quill-sql/src/main/scala/io/getquill/generic/ElaborateStructure.scala
@@ -20,11 +20,12 @@ import zio.Chunk
 import io.getquill.metaprog.Extractors
 
 /**
- * Elaboration can be different whether we are encoding or decoding because we could have
- * decoders for certain things that we don't have encoders for and vice versa. That means
- * that the potentially something encoded as a value would be decoded as a case-class
- * or vice versa. Therefore, we need to differentiate whether elaboration is used on the
- * encoding side or the decoding side.
+ * Elaboration can be different depending on whether we are encoding or
+ * decoding because we could have decoders for certain things that we don't
+ * have encoders for and vice versa. That means that potentially something
+ * encoded as a value would be decoded as a case-class or vice versa.
+ * Therefore, we need to differentiate whether elaboration is used on the
+ * encoding side or the decoding side.
 */
 enum ElaborationSide {
   case Encoding
@@ -32,9 +33,10 @@ enum ElaborationSide {
 }
 
 /**
- * Based on valueComputation and materializeQueryMeta from the old Quill
- * This was around to flesh-out details of the outermost AST of a query based on the fields of the
- * object T in Query[T] that the AST represents. For an example say we have something like this:
+ * Based on valueComputation and materializeQueryMeta from the old Quill. This
+ * was around to flesh-out details of the outermost AST of a query based on the
+ * fields of the object T in Query[T] that the AST represents. For an example
+ * say we have something like this:
 * {{{
 * import io.getquill.ast.{ Ident => Id, Property => Prop, _ }
 * case class Person(name: String, age: Int)
@@ -44,9 +46,10 @@ enum ElaborationSide {
 * {{{
 * Map(EntityQuery("Person"), Id("p"), Id("p"))
 * }}}
- * This query needs to be turned into `SELECT p.name, p.age from Person p`, the problem is, before
- * Quats, Quill did not actually know how to expand `Ident("p")` into `SelectValue(p.name), SelectValue(p.age)`
- * (see SqlQuery.scala) since there was no type information. Therefore...
+ * This query needs to be turned into `SELECT p.name, p.age from Person p`, the
+ * problem is, before Quats, Quill did not actually know how to expand
+ * `Ident("p")` into `SelectValue(p.name), SelectValue(p.age)` (see
+ * SqlQuery.scala) since there was no type information. Therefore...
 * {{{
 * // We needed to convert something that looks like this:
 * query[Person].map(p => p) // i.e. Map(EntityQuery("Person"), Id("p"), Id("p"))
@@ -55,12 +58,13 @@ enum ElaborationSide {
 * query[Person].map(p => p).map(p => (p.name, p.age))
 * // i.e. Map(Map(EntityQuery("Person"), Ident("p"), Ident("p")), Tuple(Prop(Id("p"),"name"), Prop(Id("p"),"age")))
 * }}}
- * This makes it easier to translate the above information into the finalized form
+ * This makes it easier to translate the above information into the finalized
+ * form
 * {{{
 * SELECT p.name, p.age FROM (SELECT p.* from Person p) AS p
 * }}}
- * (Note that redudant map would typically be flattened out since it is extraneous and the inner
- * SELECT would no longer be present)
+ * (Note that the redundant map would typically be flattened out since it is
+ * extraneous and the inner SELECT would no longer be present)
 *
 * Some special provisions were made for fields inside optional objects:
 * {{{
@@ -72,10 +76,11 @@ enum ElaborationSide {
 * query[Person].map(p => (p.name, p.address.map(_.street), p.address.map(_.zip)))
 * }}}
 *
- * Now, since Quats were introduced into Quill since 3.6.0 (technically since 3.5.3), this step is not necessarily needed
- * for query expansion since `Ident("p")` is now understood to expand into its corresponding SelectValue fields so for queries,
- * this stage could technically be elimiated. However, this logic is also useful for ActionMeta where we have
- * something like this:
+ * Now, since Quats were introduced into Quill since 3.6.0 (technically since
+ * 3.5.3), this step is not necessarily needed for query expansion since
+ * `Ident("p")` is now understood to expand into its corresponding SelectValue
+ * fields so for queries, this stage could technically be eliminated. However,
+ * this logic is also useful for ActionMeta where we have something like this:
 * {{{
 * case class Person(name: String, age: Int)
 * // This:
@@ -88,19 +93,20 @@ enum ElaborationSide {
 *   Assignment(Id("x1"), Prop(Id("x1"), "name"), Constant(44))
 * )
 * }}}
- * The fact that we know that Person expands into Prop(Id("p"),"name"), Prop(Id("p"),"age")) helps
- * us compute the necessary assignments in the `InsertUpdateMacro`.
+ * The fact that we know that Person expands into Prop(Id("p"),"name"),
+ * Prop(Id("p"),"age")) helps us compute the necessary assignments in the
+ * `InsertUpdateMacro`.
 */
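The expansion this comment describes can be observed directly with a mirror context; the elaborated query prints the fields that `Ident("p")` was expanded into (a sketch using the standard Quill mirror pieces; the exact SQL text may differ):

    import io.getquill._

    object ElaborationDemo {
      val ctx = new MirrorContext(MirrorSqlDialect, Literal)
      import ctx._

      case class Person(name: String, age: Int)

      def main(args: Array[String]): Unit = {
        inline def q = quote(query[Person].map(p => p))
        // Prints roughly: SELECT p.name, p.age FROM Person p
        println(ctx.run(q).string)
      }
    }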
 object ElaborateStructure {
   import io.getquill.generic.GenericDecoder
 
   sealed trait TermType
-  case object Leaf extends TermType
+  case object Leaf   extends TermType
   case object Branch extends TermType
 
   // TODO Good use-case for zio-chunk
   case class TermPath(terms: List[Term]) {
-    def append(term: Term) = this.copy(terms = this.terms :+ term)
+    def append(term: Term)     = this.copy(terms = this.terms :+ term)
     def concat(path: TermPath) = this.copy(terms = this.terms ++ path.terms)
     def mkString(separator: String = "", dropFirst: Boolean = true) =
       (if (dropFirst) terms.drop(1) else terms).map(_.name).mkString(separator)
@@ -112,8 +118,8 @@ object ElaborateStructure {
   // TODO Rename to Structure
   case class Term(name: String, typeType: TermType, children: List[Term] = List(), optional: Boolean = false) {
     def withChildren(children: List[Term]) = this.copy(children = children)
-    def toAst = Term.toAstTop(this)
-    def asLeaf = this.copy(typeType = Leaf, children = List())
+    def toAst                              = Term.toAstTop(this)
+    def asLeaf                             = this.copy(typeType = Leaf, children = List())
 
     def paths = {
       def pathsRecurse(node: Term, topLevel: Boolean = false): List[String] = {
         def emptyIfTop(str: String) = if (topLevel) "" else str
@@ -135,13 +141,19 @@ object ElaborateStructure {
 
       // Terms must both have the same name
       if (this.name != other.name)
-        report.throwError(s"Cannot resolve coproducts because terms ${this} and ${other} have different names") // TODO Describe this as better error messages for users?
+        report.throwError(
+          s"Cannot resolve coproducts because terms ${this} and ${other} have different names"
+        ) // TODO Describe this as better error messages for users?
 
       if (this.optional != other.optional)
-        report.throwError(s"Cannot resolve coproducts because one of the terms ${this} and ${other} is optional and the other is not")
+        report.throwError(
+          s"Cannot resolve coproducts because one of the terms ${this} and ${other} is optional and the other is not"
+        )
 
       if (this.typeType != other.typeType)
-        report.throwError(s"Cannot resolve coproducts because the terms ${this} and ${other} have different types (${this.typeType} and ${other.typeType} respectively)")
+        report.throwError(
+          s"Cannot resolve coproducts because the terms ${this} and ${other} have different types (${this.typeType} and ${other.typeType} respectively)"
+        )
 
       import io.getquill.util.GroupByOps._
       // Given Shape -> (Square, Rectangle) the result will be:
@@ -152,14 +164,16 @@ object ElaborateStructure {
       // with a Square.height and a Rectagnle.height, both 'height' fields but be a Leaf (and also in the future will need to have the same data type)
       // TODO Need to add datatype to Term so we can also verify types are the same for the coproducts
       val newChildren =
-        orderedGroupBy.map((term, values) => {
+        orderedGroupBy.map { (term, values) =>
           val distinctValues = values.distinct
           if (distinctValues.length > 1)
-            report.throwError(s"Invalid coproduct at: ${TypeRepr.of[T].widen.typeSymbol.name}.${term} detected multiple kinds of values: ${distinctValues}")
+            report.throwError(
+              s"Invalid coproduct at: ${TypeRepr.of[T].widen.typeSymbol.name}.${term} detected multiple kinds of values: ${distinctValues}"
+            )
 
          // TODO Check if there are zero?
          distinctValues.head
-        }).toList
+        }.toList
 
       this.copy(children = newChildren)
     }
 
@@ -193,9 +207,7 @@ object ElaborateStructure {
         case Term(name, tt, list, false) =>
           val output =
-            list.flatMap(elem =>
-              toAst(elem, property(parent, name, tt))
-            )
+            list.flatMap(elem => toAst(elem, property(parent, name, tt)))
           // Add field name to the output
           output.map((ast, childName) => (ast, name + childName))
 
@@ -204,7 +216,7 @@ object ElaborateStructure {
           val idV = Ident("v", Quat.Generic) // TODO Specific quat inference
           val output =
             for {
-              elem <- list
+              elem              <- list
               (newAst, subName) <- toAst(elem, idV)
             } yield (OptionMap(property(parent, name, Leaf), idV, newAst), subName)
           // Add field name to the output
@@ -215,7 +227,7 @@ object ElaborateStructure {
           val idV = Ident("v", Quat.Generic)
           val output =
             for {
-              elem <- list
+              elem              <- list
               (newAst, subName) <- toAst(elem, idV)
             } yield (OptionTableMap(property(parent, name, Branch), idV, newAst), subName)
 
@@ -224,15 +236,15 @@ object ElaborateStructure {
     }
 
     /**
-     * Top-Level expansion of a Term is slighly different the later levels. A the top it's always ident.map(id => ...)
-     * if Ident is an option as opposed to OptionMap which it would be, in lower layers.
+     * Top-Level expansion of a Term is slightly different from the later
+     * levels. At the top it's always ident.map(id => ...) if Ident is an
+     * option as opposed to OptionMap which it would be, in lower layers.
     *
-     * Legend:
-     *   T(x, [y,z]) := Term(x=name, children=List(y,z)), T-Opt=OptionalTerm I.e. term where optional=true
-     *   P(a, b) := Property(a, b) i.e. a.b
-     *   M(a, v, P(v, b)) := Map(a, v, P(v, b)) or m.map(v => P(v, b))
+     * Legend: T(x, [y,z]) := Term(x=name, children=List(y,z)),
+     * T-Opt=OptionalTerm I.e. term where optional=true P(a, b) := Property(a,
+     * b) i.e. a.b M(a, v, P(v, b)) := Map(a, v, P(v, b)) or m.map(v => P(v, b))
     */
-    def toAstTop(node: Term): List[(Ast, String)] = {
+    def toAstTop(node: Term): List[(Ast, String)] =
       node match {
         // Node without children
         // If leaf node, return the term, don't care about if it is optional or not
@@ -260,22 +272,30 @@ object ElaborateStructure {
           val idV = Ident("v", Quat.Generic)
           val output =
             for {
-              elem <- list
+              elem           <- list
               (newAst, name) <- toAst(elem, idV) // TODO Is this right? Should it be OptionTableMap?
             } yield (Map(Ident(name, Quat.Generic), idV, newAst), name)
          // Do not add top level field to the output. Otherwise it would be x -> (x.width, xwidth), (x.height, xheight)
          output
       }
-    }
 
-    private[getquill] def ofProduct[T: Type](side: ElaborationSide, baseName: String = "notused", udtBehavior: UdtBehavior = UdtBehavior.Leaf)(using Quotes) =
+    private[getquill] def ofProduct[T: Type](
+        side: ElaborationSide,
+        baseName: String = "notused",
+        udtBehavior: UdtBehavior = UdtBehavior.Leaf
+    )(using Quotes) =
       base[T](Term(baseName, Branch), side, udtBehavior)
 
   } // end Term
 
-  /** Go through all possibilities that the element might be and collect their fields */
-  def collectFields[Fields, Types](node: Term, fieldsTup: Type[Fields], typesTup: Type[Types], side: ElaborationSide)(using Quotes): List[Term] = {
+  /**
+   * Go through all possibilities that the element might be and collect their
+   * fields
+   */
+  def collectFields[Fields, Types](node: Term, fieldsTup: Type[Fields], typesTup: Type[Types], side: ElaborationSide)(
+      using Quotes
+  ): List[Term] = {
     import quotes.reflect.{Term => QTerm, _}
 
     (fieldsTup, typesTup) match {
@@ -287,7 +307,13 @@ object ElaborateStructure {
   }
 
   @tailrec
-  def flatten[Fields, Types](node: Term, fieldsTup: Type[Fields], typesTup: Type[Types], side: ElaborationSide, accum: List[Term] = List())(using Quotes): List[Term] = {
+  def flatten[Fields, Types](
+      node: Term,
+      fieldsTup: Type[Fields],
+      typesTup: Type[Types],
+      side: ElaborationSide,
+      accum: List[Term] = List()
+  )(using Quotes): List[Term] = {
     import quotes.reflect.{Term => QTerm, _}
 
     def constValue[T: Type]: String =
@@ -312,8 +338,7 @@ object ElaborateStructure {
           // println(s"------ Optional field expansion ${Type.of[field].constValue.toString}:${TypeRepr.of[tpe].show} is a product ----------")
           val baseTerm = base[tpe](childTerm, side)
           flatten(node, Type.of[fields], Type.of[types], side, baseTerm +: accum)
-        }
-        else {
+        } else {
           val childTerm = Term(Type.of[field].constValue, Leaf, optional = true)
           // println(s"------ Optional field expansion ${Type.of[field].constValue.toString}:${TypeRepr.of[tpe].show} is a Leaf ----------")
           flatten(node, Type.of[fields], Type.of[types], side, childTerm +: accum)
@@ -333,7 +358,8 @@ object ElaborateStructure {
 
       case (_, '[EmptyTuple]) => accum.reverse
 
-      case _ => report.throwError("Cannot Derive Product during Type Flattening of Expression:\n" + (fieldsTup, typesTup))
+      case _ =>
+        report.throwError("Cannot Derive Product during Type Flattening of Expression:\n" + (fieldsTup, typesTup))
     }
   }
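`flatten` recurses through the mirror's element labels/types and produces the `Term` tree whose `paths` later become column names. For a nested case class the tree it builds looks schematically like this (using the `Term`, `Leaf`, and `Branch` shapes defined in this file; field order follows the constructor):

    // case class Name(first: String, last: String)
    // case class Person(name: Name, age: Int)
    val elaboration =
      Term(
        "x",
        Branch,
        children = List(
          Term("name", Branch, children = List(Term("first", Leaf), Term("last", Leaf))),
          Term("age", Leaf)
        )
      )
    // elaboration.paths == List("namefirst", "namelast", "age")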
@@ -346,22 +372,20 @@ object ElaborateStructure {
   * Expand the structure of base term into series of terms for a given type
   * e.g. for Term(x) wrap Person (case class Person(name: String, age: Int))
   * will be Term(name, Term(x, Branch), Leaf), Term(age, Term(x, Branch), Leaf)
-  * Note that this could potentially be different if we are on the encoding or the
-  * decoding side. For example, someone could create something like:
-  * {{
-  *   case class VerifiedName(val name: String) { ... }
-  *   val decoding = MappedDecoding ...
-  * }}
-  * Since we only have decoders, we need to know that VerifiedName is an actual value
-  * type as opposed to an embedded case class (since ProtoQuill does not require presence
-  * of the 'Embedded' type). So we need to know that VerifiedName is going to be:
-  * {{ Term(x, Leaf)) }}
-  * as opposed what we would generically have thought:
-  * {{ Term(name, Term(x, Branch), Leaf)) }}.
-  * That means that we need to know whether to look for an encoder as opposed to a decoder
-  * when trying to wrap this type.
+  * Note that this could potentially be different if we are on the encoding or
+  * the decoding side. For example, someone could create something like: {{
+  * case class VerifiedName(val name: String) { ... } val decoding =
+  * MappedDecoding ... }} Since we only have decoders, we need to know that
+  * VerifiedName is an actual value type as opposed to an embedded case class
+  * (since ProtoQuill does not require presence of the 'Embedded' type). So we
+  * need to know that VerifiedName is going to be: {{ Term(x, Leaf)) }} as
+  * opposed what we would generically have thought: {{ Term(name, Term(x,
+  * Branch), Leaf)) }}. That means that we need to know whether to look for an
+  * encoder as opposed to a decoder when trying to wrap this type.
  */
-  def base[T: Type](term: Term, side: ElaborationSide, udtBehavior: UdtBehavior = UdtBehavior.Leaf)(using Quotes): Term = {
+  def base[T: Type](term: Term, side: ElaborationSide, udtBehavior: UdtBehavior = UdtBehavior.Leaf)(using
+      Quotes
+  ): Term = {
     import quotes.reflect.{Term => QTerm, _}
 
     // for errors/warnings
@@ -408,11 +432,17 @@ object ElaborateStructure {
       case Some(ev) =>
         // Otherwise, recursively summon fields
         ev match {
-          case '{ $m: Mirror.ProductOf[T] { type MirroredElemLabels = elementLabels; type MirroredElemTypes = elementTypes } } =>
+          case '{
+                $m: Mirror.ProductOf[T] {
+                  type MirroredElemLabels = elementLabels; type MirroredElemTypes = elementTypes
+                }
+              } =>
             val children = flatten(term, Type.of[elementLabels], Type.of[elementTypes], side)
             term.withChildren(children)
          // TODO Make sure you can summon a ColumnResolver if there is a SumMirror, otherwise this kind of decoding should be impossible
-          case '{ $m: Mirror.SumOf[T] { type MirroredElemLabels = elementLabels; type MirroredElemTypes = elementTypes } } =>
+          case '{
+                $m: Mirror.SumOf[T] { type MirroredElemLabels = elementLabels; type MirroredElemTypes = elementTypes }
+              } =>
            // Find field infos (i.e. Term objects) for all potential types that this coproduct could be
             val alternatives = collectFields(term, Type.of[elementLabels], Type.of[elementTypes], side)
             // Then merge them together to get one term representing all of their fields types.
@@ -421,11 +451,14 @@ object ElaborateStructure {
             alternatives.reduce((termA, termB) => termA.merge[T](termB))
           case _ =>
             report.throwError(
-              s"Althought a mirror of the type ${Format.TypeOf[T]} can be summoned. It is not a sum-type, a product-type, or a ${encDecText} entity so its fields cannot be understood in the structure-elaborator. Its mirror is ${Format.Expr(ev)}"
+              s"Although a mirror of the type ${Format.TypeOf[T]} can be summoned, it is not a sum-type, a product-type, or a ${encDecText} entity so its fields cannot be understood in the structure-elaborator. Its mirror is ${Format
+                .Expr(ev)}"
             )
         }
       case None =>
-        report.throwError(s"A mirror of the type ${Format.TypeOf[T]} cannot be summoned. It is not a sum-type, a product-type, or a ${encDecText} entity so its fields cannot be understood in the structure-elaborator.")
+        report.throwError(
+          s"A mirror of the type ${Format.TypeOf[T]} cannot be summoned. It is not a sum-type, a product-type, or a ${encDecText} entity so its fields cannot be understood in the structure-elaborator."
+        )
     }
   }
 
@@ -449,45 +482,37 @@ object ElaborateStructure {
       productized(side, baseName)
 
   /**
-   * Example:
-   *   case class Person(name: String, age: Option[Int])
-   *   val p = Person("Joe")
-   *   lift(p)
-   * Output:
-   *   Quote(ast: CaseClass("name" -> ScalarTag(name), lifts: EagerLift(p.name, name))
+   * Example: case class Person(name: String, age: Option[Int]) val p =
+   * Person("Joe") lift(p) Output: Quote(ast: CaseClass("name" ->
+   * ScalarTag(name), lifts: EagerLift(p.name, name))
    *
-   * Example:
-   *   case class Name(first: String, last: String)
-   *   case class Person(name: Name)
-   *   val p = Person(Name("Joe", "Bloggs"))
-   *   lift(p)
-   * Output:
-   *   Quote(ast: CaseClass("name" -> CaseClass("first" -> ScalarTag(namefirst), "last" -> ScalarTag(last)),
-   *     lifts: EagerLift(p.name.first, namefirst), EagerLift(p.name.last, namelast))
+   * Example: case class Name(first: String, last: String) case class
+   * Person(name: Name) val p = Person(Name("Joe", "Bloggs")) lift(p) Output:
+   * Quote(ast: CaseClass("name" -> CaseClass("first" -> ScalarTag(namefirst),
+   * "last" -> ScalarTag(last)), lifts: EagerLift(p.name.first, namefirst),
+   * EagerLift(p.name.last, namelast))
    *
-   * Note for examples below:
-   *   idA := "namefirst"
-   *   idB := "namelast"
+   * Note for examples below: idA := "namefirst" idB := "namelast"
    *
-   * Example:
-   *   case class Name(first: String, last: String)
-   *   case class Person(name: Option[Name])
-   *   val p = Person(Some(Name("Joe", "Bloggs")))
-   *   lift(p)
-   * Output:
-   *   Quote(ast: CaseClass("name" -> OptionSome(CaseClass("first" -> ScalarTag(idA), "last" -> ScalarTag(idB))),
-   *     lifts: EagerLift(p.name.map(_.first), namefirst), EagerLift(p.name.map(_.last), namelast))
+   * Example: case class Name(first: String, last: String) case class
+   * Person(name: Option[Name]) val p = Person(Some(Name("Joe", "Bloggs")))
+   * lift(p) Output: Quote(ast: CaseClass("name" -> OptionSome(CaseClass("first"
+   * -> ScalarTag(idA), "last" -> ScalarTag(idB))), lifts:
+   * EagerLift(p.name.map(_.first), namefirst), EagerLift(p.name.map(_.last),
+   * namelast))
   *
-   * Alternatively, the case where it is:
-   *   val p = Person(None) the AST and lifts remain the same, only they effectively become None for every value:
-   *   lifts: EagerLift(None , idA), EagerLift(None , idB))
+   * Alternatively, the case where it is: val p = Person(None) the AST and lifts
+   * remain the same, only they effectively become None for every value: lifts:
+   * EagerLift(None , idA), EagerLift(None , idB))
   *
   * Legend: x:a->b := Assignment(Ident("x"), a, b)
   */
  // TODO Should have specific tests for this function indepdendently
  // keep namefirst, namelast etc.... so that testability is easier due to determinism
  // re-key by the UIDs later
-  def ofProductValue[T: Type](productValue: Expr[T], side: ElaborationSide)(using Quotes): TaggedLiftedCaseClass[Ast] = {
+  def ofProductValue[T: Type](productValue: Expr[T], side: ElaborationSide)(using
+      Quotes
+  ): TaggedLiftedCaseClass[Ast] = {
     val elaborated = ElaborateStructure.Term.ofProduct[T](side)
     // create a nested AST for the Term nest with the expected scalar tags inside
     val (_, nestedAst) = productValueToAst(elaborated)
@@ -520,15 +545,17 @@ object ElaborateStructure {
     def summonElaboration[T: Type] = {
       val elaboration = ElaborateStructure.Term.ofProduct[T](side, udtBehavior = udtBehavior)
       if (elaboration.typeType == Leaf)
-        report.throwError(s"Error encoding UDT: ${Format.TypeOf[T]}. Elaboration detected no fields (i.e. was a leaf-type). This should not be possible.")
This should not be possible.") + report.throwError( + s"Error encoding UDT: ${Format.TypeOf[T]}. Elaboration detected no fields (i.e. was a leaf-type). This should not be possible." + ) elaboration } val elaboration = summonElaboration[T] // If it is get the components val components = - DeconstructElaboratedEntityLevels.withTerms[T](elaboration).map((term, getter, rawTpe) => { - val tpe = innerType(rawTpe) + DeconstructElaboratedEntityLevels.withTerms[T](elaboration).map { (term, getter, rawTpe) => + val tpe = innerType(rawTpe) val isOpt = isOptional(rawTpe) // Note, we can't look at term.optional for optionality because that one is only optional on the term level, // in reality the type might be optional on the parent level as well. @@ -539,7 +566,7 @@ object ElaborateStructure { // just tacking on .toString after the p.name.last expression since that would be p.name.last:Option[String].toString which // makes an invalid query. See the MapFlicer for an example of this. (term.name, isOpt, getter, tpe) - }) + } (components, elaboration.typeType) } // end decomposedProductValueDetails @@ -547,38 +574,51 @@ object ElaborateStructure { import quotes.reflect._ // for t:T := Person(name: String, age: Int) it will be paths := List[Expr](t.name, t.age) (labels: List("name", "age")) // for t:T := Person(name: Name, age: Int), Name(first:String, last: String) it will be paths := List[Expr](t.name.first, t.name.last, t.age) (labels: List(namefirst, namelast, age)) - val labels = elaboration.paths + val labels = elaboration.paths val pathLambdas = DeconstructElaboratedEntityLevels[T](elaboration) val paths: List[Expr[_]] = pathLambdas.map { (exprPath, exprType) => exprType match { case '[t] => if (TypeRepr.of[t] =:= TypeRepr.of[Any]) { lazy val showableExprPath = '{ (input: T) => ${ exprPath('input) } } - report.warning(s"The following the expression was typed `Any`: ${Format.Expr(showableExprPath)}. Will likely not be able to summon an encoder for this (the actual type was: ${Format.TypeOf[T]} in ${Format.TypeRepr( - showableExprPath.asTerm.tpe - )}) (the other param was ${Format.TypeOf[T]}.") + report.warning( + s"The following the expression was typed `Any`: ${Format.Expr(showableExprPath)}. Will likely not be able to summon an encoder for this (the actual type was: ${Format + .TypeOf[T]} in ${Format.TypeRepr( + showableExprPath.asTerm.tpe + )}) (the other param was ${Format.TypeOf[T]}." + ) } '{ ${ exprPath(productValue) }.asInstanceOf[t] } } } if (labels.length != pathLambdas.length) - report.throwError(s"List of (${labels.length}) labels: ${labels} does not match list of (${paths.length}) paths that they represent: ${paths.map(Format.Expr(_))}") + report.throwError( + s"List of (${labels.length}) labels: ${labels} does not match list of (${paths.length}) paths that they represent: ${paths + .map(Format.Expr(_))}" + ) val outputs = labels.zip(paths) outputs.foreach { (label, exprPath) => if (exprPath.asTerm.tpe =:= TypeRepr.of[Any]) - report.warning(s"`Any` value found for the path ${label} at the expression ${Format.Expr(exprPath)}. Will likely not be able to summon an encoder for this.") + report.warning( + s"`Any` value found for the path ${label} at the expression ${Format.Expr(exprPath)}. Will likely not be able to summon an encoder for this." 
+        )
     }
     outputs
   }
 
-  private[getquill] def decomposedLiftsOfProductValue[T: Type](elaboration: Term)(using Quotes): List[(Expr[T] => Expr[_], Type[_])] =
+  private[getquill] def decomposedLiftsOfProductValue[T: Type](elaboration: Term)(using
+      Quotes
+  ): List[(Expr[T] => Expr[_], Type[_])] =
     DeconstructElaboratedEntityLevels[T](elaboration)
 
   /**
    * Flatten the elaboration from 'node' into a completely flat product type
-   * Technicallly don't need Type T but it's very useful to know for errors and it's an internal API so I'll keep it for now
+   * Technically we don't need Type T, but it's very useful to know for errors
+   * and it's an internal API so I'll keep it for now
   */
-  private[getquill] def productValueToAst[T: Type](node: Term /* i.e. the elaboration */ )(using Quotes): (String, Ast) = {
+  private[getquill] def productValueToAst[T: Type](
+      node: Term /* i.e. the elaboration */
+  )(using Quotes): (String, Ast) = {
     def toAstRec(node: Term, parentTerms: Chunk[String], topLevel: Boolean = false): (String, Ast) = {
       def notTopLevel(termName: Chunk[String]) = if (topLevel) Chunk.empty else termName
       node match {
@@ -586,7 +626,7 @@ object ElaborateStructure {
           // CC(foo: CC(bar: CC(baz: String))) should be: ScalarTag(foobarbaz, Source.UnparsedProperty("foo_bar_baz"))
          // the UnparsedProperty part is potentially used in batch queries for property naming
           val tagTerms = parentTerms :+ name
-          val tagName = tagTerms.mkString
+          val tagName  = tagTerms.mkString
          // There could be variable names that have "$" in them e.g. anonymous tuples as in:
          // foreach(List((foo,bar),(baz,blin))).map { case (a, b) => query[Update](...a...) }
          // so the batch identifier is unknown would manifest as x$1 etc... Make sure to at least remove $ from the variable name
@@ -608,7 +648,7 @@ object ElaborateStructure {
   }
 
   extension [T](opt: Option[T]) {
-    def getOrThrow(msg: String) = opt.getOrElse { throw new IllegalArgumentException(msg) }
+    def getOrThrow(msg: String) = opt.getOrElse(throw new IllegalArgumentException(msg))
   }
 
   case class TaggedLiftedCaseClass[A <: Ast](caseClass: A, lifts: List[(String, Expr[_])]) {
@@ -618,16 +658,15 @@ object ElaborateStructure {
     /** Replace keys of the tagged lifts with proper UUIDs */
     def reKeyWithUids(): TaggedLiftedCaseClass[A] = {
       def replaceKeys(newKeys: Map[String, String]): Ast =
-        Transform(caseClass) {
-          case ScalarTag(keyName, source) =>
-            lazy val msg = s"Cannot find key: '${keyName}' in the list of replacements: ${newKeys}"
-            ScalarTag(newKeys.get(keyName).getOrThrow(msg), source)
+        Transform(caseClass) { case ScalarTag(keyName, source) =>
+          lazy val msg = s"Cannot find key: '${keyName}' in the list of replacements: ${newKeys}"
+          ScalarTag(newKeys.get(keyName).getOrThrow(msg), source)
         }
 
-      val oldAndNewKeys = lifts.map((key, expr) => (key, uuid(), expr))
-      val keysToNewKeys = oldAndNewKeys.map((key, newKey, _) => (key, newKey)).toMap
+      val oldAndNewKeys     = lifts.map((key, expr) => (key, uuid(), expr))
+      val keysToNewKeys     = oldAndNewKeys.map((key, newKey, _) => (key, newKey)).toMap
       val newNewKeysToLifts = oldAndNewKeys.map((_, newKey, lift) => (newKey, lift))
-      val newAst = replaceKeys(keysToNewKeys)
+      val newAst            = replaceKeys(keysToNewKeys)
       TaggedLiftedCaseClass(newAst.asInstanceOf[A], newNewKeysToLifts)
     }
   }
diff --git a/quill-sql/src/main/scala/io/getquill/generic/EncodingDsl.scala b/quill-sql/src/main/scala/io/getquill/generic/EncodingDsl.scala
index 7ad05a8a0..d21af614f 100644
--- a/quill-sql/src/main/scala/io/getquill/generic/EncodingDsl.scala
+++ b/quill-sql/src/main/scala/io/getquill/generic/EncodingDsl.scala
@@ -8,26 +8,31 @@ import io.getquill.MappedEncoding
 import io.getquill.generic.DecodingType
 
 /**
- * Note that much of the implementation of anyValEncoder/anyValDecoder is a workaround for:
+ * Note that much of the implementation of anyValEncoder/anyValDecoder is a
+ * workaround for:
 * https://github.com/lampepfl/dotty/issues/12179#issuecomment-826294510
 *
- * Originally, the idea was to simply pass the `self` in `LowPriorityImplicits` directly
- * into the macro that creates the AnyValEncoders. That way, the implementation would be as simple as:
+ * Originally, the idea was to simply pass the `self` in `LowPriorityImplicits`
+ * directly into the macro that creates the AnyValEncoders. That way, the
+ * implementation would be as simple as:
 * {{{
 * trait LowPriorityImplicits { self: EncodingDsl =>
 *   implicit inline def anyValEncoder[Cls <: AnyVal]: Encoder[Cls] =
 *     new MappedEncoderMaker[Encoder, Cls](self)
 * }
 * }}}
- * Then, the MappedEncoderMaker could just internally call `self.mappedEncoder(mapped, encoder)`
- * (where this `self` is the one that is passed in from the `LowPriorityImplicits`).
+ * Then, the MappedEncoderMaker could just internally call
+ * `self.mappedEncoder(mapped, encoder)` (where this `self` is the one that is
+ * passed in from the `LowPriorityImplicits`).
 *
- * Unfortunately however, because of Dotty#12179, this would create an implicit encoder which would
- * never be found. This created the need for the additional abstraction of AnyValEncoderContext and
- * AnyValDecoderContext which would define `makeMappedEncoder`/`makeMappedDecoder` stub methods
- * that the `LowPriorityImplicits` methods `anyValEncoder`/`anyValDecoder` could delegate the actual
- * encoding/decoding work into. Hopefully when Dotty#12179 is resolved all of this convoluted logic
- * can be removed and we can go back to the simpler implementation.
+ * Unfortunately however, because of Dotty#12179, this would create an implicit
+ * encoder which would never be found. This created the need for the additional
+ * abstraction of AnyValEncoderContext and AnyValDecoderContext which would
+ * define `makeMappedEncoder`/`makeMappedDecoder` stub methods that the
+ * `LowPriorityImplicits` methods `anyValEncoder`/`anyValDecoder` could delegate
+ * the actual encoding/decoding work into. Hopefully when Dotty#12179 is
+ * resolved all of this convoluted logic can be removed and we can go back to
+ * the simpler implementation.
 */
 trait LowPriorityImplicits { self: EncodingDsl =>
 
@@ -72,24 +77,36 @@ trait EncodingDsl extends LowPriorityImplicits { self => // extends LowPriorityI
 
   // Initial Encoder/Decoder classes that Context implementations will subclass for their
   // respective Encoder[T]/Decoder[T] implementations e.g. JdbcEncoder[T](...) extends BaseEncoder[T]
-  type BaseEncoder[T] = GenericEncoder[T, PrepareRow, Session]
-  type BaseDecoder[T] = GenericDecoder[ResultRow, Session, T, DecodingType.Specific]
+  type BaseEncoder[T]  = GenericEncoder[T, PrepareRow, Session]
+  type BaseDecoder[T]  = GenericDecoder[ResultRow, Session, T, DecodingType.Specific]
   type BaseNullChecker = GenericNullChecker[ResultRow, Session]
 
   type ColumnResolver = GenericColumnResolver[ResultRow]
-  type RowTyper[T] = GenericRowTyper[ResultRow, T]
+  type RowTyper[T]    = GenericRowTyper[ResultRow, T]
 
   // For: Mapped := Foo(value: String), Base := String
   // Encoding follows: (MappedEncoding(Foo) => String) <=(contramap)= Encoder(Foo)
-  implicit def mappedEncoder[Mapped, Base](implicit mapped: MappedEncoding[Mapped, Base], encoder: Encoder[Base]): Encoder[Mapped]
+  implicit def mappedEncoder[Mapped, Base](implicit
+      mapped: MappedEncoding[Mapped, Base],
+      encoder: Encoder[Base]
+  ): Encoder[Mapped]
 
   // For: Base := String, Mapped := Foo(value: String)
   // Decoding follows: (MappedEncoding(String) => Foo) =(map)=> Decoder(Foo)
-  implicit def mappedDecoder[Base, Mapped](implicit mapped: MappedEncoding[Base, Mapped], decoder: Decoder[Base]): Decoder[Mapped]
+  implicit def mappedDecoder[Base, Mapped](implicit
+      mapped: MappedEncoding[Base, Mapped],
+      decoder: Decoder[Base]
+  ): Decoder[Mapped]
 
-  protected def mappedBaseEncoder[Mapped, Base](mapped: MappedEncoding[Mapped, Base], encoder: EncoderMethod[Base]): EncoderMethod[Mapped] =
+  protected def mappedBaseEncoder[Mapped, Base](
+      mapped: MappedEncoding[Mapped, Base],
+      encoder: EncoderMethod[Base]
+  ): EncoderMethod[Mapped] =
     (index, value, row, session) => encoder(index, mapped.f(value), row, session)
 
-  protected def mappedBaseDecoder[Base, Mapped](mapped: MappedEncoding[Base, Mapped], decoder: DecoderMethod[Base]): DecoderMethod[Mapped] =
+  protected def mappedBaseDecoder[Base, Mapped](
+      mapped: MappedEncoding[Base, Mapped],
+      decoder: DecoderMethod[Base]
+  ): DecoderMethod[Mapped] =
     (index, row, session) => mapped.f(decoder(index, row, session))
 
   // Define some standard encoders that all contexts should have
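The `mappedBaseEncoder`/`mappedBaseDecoder` pair above is a plain contramap/map over the encoder and decoder function shapes. Stripped of the context types it is just (a sketch with hypothetical row/session aliases):

    // Hypothetical stand-ins for PrepareRow/ResultRow/Session.
    type Enc[T] = (Int, T, Vector[Any], Unit) => Vector[Any]
    type Dec[T] = (Int, Vector[Any], Unit) => T

    // contramap: an Enc[Base] handles Mapped once we can turn Mapped into Base
    def contramapEnc[M, B](f: M => B, enc: Enc[B]): Enc[M] =
      (i, value, row, session) => enc(i, f(value), row, session)

    // map: a Dec[Base] yields Mapped by converting after decoding
    def mapDec[B, M](f: B => M, dec: Dec[B]): Dec[M] =
      (i, row, session) => f(dec(i, row, session))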
- report.throwError(s"Cannot find a null-checker for the session type ${Format.TypeOf[Session]} (whose result-row type is: ${Format.TypeOf[ResultRow]})") + report.throwError(s"Cannot find a null-checker for the session type ${Format + .TypeOf[Session]} (whose result-row type is: ${Format.TypeOf[ResultRow]})") } } - def decoder[ResultRow: Type, Session: Type, T: Type](index: Expr[Int], resultRow: Expr[ResultRow], session: Expr[Session])(using Quotes): Option[Expr[T]] = { + def decoder[ResultRow: Type, Session: Type, T: Type]( + index: Expr[Int], + resultRow: Expr[ResultRow], + session: Expr[Session] + )(using Quotes): Option[Expr[T]] = { import quotes.reflect.{Term => QTerm, _} // Try to summon a specific decoder, if it's not there, summon a generic one Expr.summon[GenericDecoder[ResultRow, Session, T, DecodingType.Specific]] match { @@ -50,7 +57,11 @@ object Summon { } } - def decoderOrFail[ResultRow: Type, Session: Type, T: Type](index: Expr[Int], resultRow: Expr[ResultRow], session: Expr[Session])(using Quotes): Expr[T] = { + def decoderOrFail[ResultRow: Type, Session: Type, T: Type]( + index: Expr[Int], + resultRow: Expr[ResultRow], + session: Expr[Session] + )(using Quotes): Expr[T] = { import quotes.reflect._ decoder[ResultRow, Session, T](index, resultRow, session) match { case Some(value) => value @@ -63,7 +74,9 @@ object Summon { object GenericDecoder { - def tryResolveIndex[ResultRow: Type](originalIndex: Expr[Int], resultRow: Expr[ResultRow], fieldName: String)(using Quotes) = { + def tryResolveIndex[ResultRow: Type](originalIndex: Expr[Int], resultRow: Expr[ResultRow], fieldName: String)(using + Quotes + ) = { import quotes.reflect._ Expr.summon[GenericColumnResolver[ResultRow]] match { case Some(resolver) => Some('{ $resolver($resultRow, ${ Expr(fieldName) }) }) @@ -79,11 +92,13 @@ object GenericDecoder { @tailrec def flatten[ResultRow: Type, Session: Type, Fields: Type, Types: Type]( - index: Int, - baseIndex: Expr[Int], - resultRow: Expr[ResultRow], - session: Expr[Session] - )(fieldsTup: Type[Fields], typesTup: Type[Types], accum: List[FlattenData] = List())(using Quotes): List[FlattenData] = { + index: Int, + baseIndex: Expr[Int], + resultRow: Expr[ResultRow], + session: Expr[Session] + )(fieldsTup: Type[Fields], typesTup: Type[Types], accum: List[FlattenData] = List())(using + Quotes + ): List[FlattenData] = { import quotes.reflect.{Term => QTerm, _} (fieldsTup, typesTup) match { @@ -91,14 +106,19 @@ object GenericDecoder { // or if it is an embedded entity e.g. Person(name: Name, age: Int) where Name is `case class Name(first: String, last: String)`. // TODO summoning `GenericDecoder[ResultRow, T, Session, DecodingType.Specific]` here twice, once in the if statement, // then later in summonAndDecode. This can potentially be improved i.e. it can be summoned just once and reused. 
- case ('[field *: fields], '[tpe *: types]) if Expr.summon[GenericDecoder[ResultRow, Session, tpe, DecodingType.Specific]].isEmpty => + case ('[field *: fields], '[tpe *: types]) + if Expr.summon[GenericDecoder[ResultRow, Session, tpe, DecodingType.Specific]].isEmpty => // Get the field class as an actual string, on the mirror itself it's stored as a type val fieldValue = Type.of[field].constValue - val result = decode[tpe, ResultRow, Session](index, baseIndex, resultRow, session) + val result = decode[tpe, ResultRow, Session](index, baseIndex, resultRow, session) // Say we are on Person(id(c1): Int, name: Name(first(c2): String, last(c3): String), age(c4): Int) // if we are on the at the last `age` field the last recursion of the `name` field should have bumped our index 3 val nextIndex = result.index + 1 - flatten[ResultRow, Session, fields, types](nextIndex, baseIndex, resultRow, session)(Type.of[fields], Type.of[types], result +: accum) + flatten[ResultRow, Session, fields, types](nextIndex, baseIndex, resultRow, session)( + Type.of[fields], + Type.of[types], + result +: accum + ) case ('[field *: fields], '[tpe *: types]) => val fieldValue = Type.of[field].constValue @@ -115,9 +135,13 @@ object GenericDecoder { // For example if the index is 0, the ColumnResolver will look at Square and resolve that index to the // `length` column whose actual index (of ResultRow) as seen above is 3. val possiblyShiftedIndex = tryResolveIndex[ResultRow]('{ $baseIndex + ${ Expr(index) } }, resultRow, fieldValue) - val result = decode[tpe, ResultRow, Session](index, baseIndex, resultRow, session, possiblyShiftedIndex) - val nextIndex = index + 1 - flatten[ResultRow, Session, fields, types](nextIndex, baseIndex, resultRow, session)(Type.of[fields], Type.of[types], result +: accum) + val result = decode[tpe, ResultRow, Session](index, baseIndex, resultRow, session, possiblyShiftedIndex) + val nextIndex = index + 1 + flatten[ResultRow, Session, fields, types](nextIndex, baseIndex, resultRow, session)( + Type.of[fields], + Type.of[types], + result +: accum + ) case (_, '[EmptyTuple]) => accum @@ -125,7 +149,12 @@ object GenericDecoder { } } // end flatten - def decodeOptional[T: Type, ResultRow: Type, Session: Type](index: Int, baseIndex: Expr[Int], resultRow: Expr[ResultRow], session: Expr[Session])(using Quotes): FlattenData = { + def decodeOptional[T: Type, ResultRow: Type, Session: Type]( + index: Int, + baseIndex: Expr[Int], + resultRow: Expr[ResultRow], + session: Expr[Session] + )(using Quotes): FlattenData = { import quotes.reflect._ // Try to summon a specific optional from the context, this may not exist since // some optionDecoder implementations themselves rely on the context-speicific Decoder[T] which is actually @@ -134,14 +163,16 @@ object GenericDecoder { // In the case that this is a leaf node case Some(_) => - val decoder = Summon.decoderOrFail[ResultRow, Session, Option[T]]('{ $baseIndex + ${ Expr(index) } }, resultRow, session) + val decoder = + Summon.decoderOrFail[ResultRow, Session, Option[T]]('{ $baseIndex + ${ Expr(index) } }, resultRow, session) val nullChecker = Summon.nullChecker[ResultRow, Session]('{ $baseIndex + ${ Expr(index) } }, resultRow) FlattenData(Type.of[Option[T]], decoder, '{ !${ nullChecker } }, index) // This is the cases where we have a optional-product element. It could either be a top level // element e.g. Option[Row] or a nested element i.e. 
the Option[Name] in Person(name: Option[Name], age: Int) case None => - val FlattenData(_, construct, nullCheck, lastIndex) = decode[T, ResultRow, Session](index, baseIndex, resultRow, session) + val FlattenData(_, construct, nullCheck, lastIndex) = + decode[T, ResultRow, Session](index, baseIndex, resultRow, session) val constructOrNone = '{ if (${ nullCheck }) Some[T](${ construct.asExprOf[T] }) else None } FlattenData(Type.of[Option[T]], constructOrNone, nullCheck, lastIndex) } @@ -197,7 +228,13 @@ object GenericDecoder { isOption[T] || (TypeRepr.of[T] <:< TypeRepr.of[Seq[_]]) } - def decode[T: Type, ResultRow: Type, Session: Type](index: Int, baseIndex: Expr[Int], resultRow: Expr[ResultRow], session: Expr[Session], overriddenIndex: Option[Expr[Int]] = None)(using Quotes): FlattenData = { + def decode[T: Type, ResultRow: Type, Session: Type]( + index: Int, + baseIndex: Expr[Int], + resultRow: Expr[ResultRow], + session: Expr[Session], + overriddenIndex: Option[Expr[Int]] = None + )(using Quotes): FlattenData = { import quotes.reflect._ // index of a possible decoder element if we need one lazy val elementIndex = '{ $baseIndex + ${ Expr(index) } } @@ -220,36 +257,59 @@ object GenericDecoder { case Some(ev) => // Otherwise, recursively summon fields ev match { - case '{ $m: Mirror.SumOf[T] { type MirroredElemLabels = elementLabels; type MirroredElemTypes = elementTypes } } if (!isBuiltInType[T]) => + case '{ + $m: Mirror.SumOf[T] { + type MirroredElemLabels = elementLabels; type MirroredElemTypes = elementTypes + } + } if (!isBuiltInType[T]) => // do not treat optional objects as coproduts, a Specific (i.e. EncodingType.Specific) Option-decoder // is defined in the EncodingDsl DecodeSum[T, ResultRow, Session, elementTypes](index, baseIndex, resultRow, session) - case '{ $m: Mirror.ProductOf[T] { type MirroredElemLabels = elementLabels; type MirroredElemTypes = elementTypes } } => - val children = flatten(index, baseIndex, resultRow, session)(Type.of[elementLabels], Type.of[elementTypes]).reverse + case '{ + $m: Mirror.ProductOf[T] { + type MirroredElemLabels = elementLabels; type MirroredElemTypes = elementTypes + } + } => + val children = + flatten(index, baseIndex, resultRow, session)(Type.of[elementLabels], Type.of[elementTypes]).reverse decodeProduct[T](children, m) - case _ => report.throwError(s"Decoder for ${Format.TypeOf[T]} could not be summoned. It has no decoder and is not a recognized Product or Sum type.") + case _ => + report.throwError( + s"Decoder for ${Format.TypeOf[T]} could not be summoned. It has no decoder and is not a recognized Product or Sum type." 
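/**
 * For a product type, the decoder assembled here is conceptually just a
 * constructor call over the sequentially decoded columns. A sketch of the shape
 * of the generated code (not the literal output), for a hypothetical
 * Person(name: String, age: Int):
 * {{{
 * // new Person(
 * //   stringDecoder(baseIndex + 0, resultRow, session),
 * //   intDecoder(baseIndex + 1, resultRow, session)
 * // )
 * }}}
 */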
+ ) } // end match case _ => - report.throwError(s"No Decoder found for ${Format.TypeOf[T]} and it is not a class representing a group of columns") + report.throwError( + s"No Decoder found for ${Format.TypeOf[T]} and it is not a class representing a group of columns" + ) } // end match - } // end match + } // end match } } // end decode - def summon[T: Type, ResultRow: Type, Session: Type](using quotes: Quotes): Expr[GenericDecoder[ResultRow, Session, T, DecodingType.Generic]] = { + def summon[T: Type, ResultRow: Type, Session: Type](using + quotes: Quotes + ): Expr[GenericDecoder[ResultRow, Session, T, DecodingType.Generic]] = { import quotes.reflect._ '{ new GenericDecoder[ResultRow, Session, T, DecodingType.Generic] { - def apply(baseIndex: Int, resultRow: ResultRow, session: Session) = ${ GenericDecoder.decode[T, ResultRow, Session](0, 'baseIndex, 'resultRow, 'session).decodedExpr }.asInstanceOf[T] + def apply(baseIndex: Int, resultRow: ResultRow, session: Session) = ${ + GenericDecoder.decode[T, ResultRow, Session](0, 'baseIndex, 'resultRow, 'session).decodedExpr + }.asInstanceOf[T] } } } } object DecodeSum { - def apply[T: Type, ResultRow: Type, Session: Type, ElementTypes: Type](index: Int, baseIndex: Expr[Int], resultRow: Expr[ResultRow], session: Expr[Session])(using Quotes): FlattenData = { + def apply[T: Type, ResultRow: Type, Session: Type, ElementTypes: Type]( + index: Int, + baseIndex: Expr[Int], + resultRow: Expr[ResultRow], + session: Expr[Session] + )(using Quotes): FlattenData = { import quotes.reflect._ // First make sure there is a column resolver, otherwise we can't look up fields by name which // means we can't get specific fields which means we can't decode co-products @@ -259,7 +319,8 @@ object DecodeSum { Expr.summon[GenericColumnResolver[ResultRow]] match { case None => report.warning( - s"Need column resolver for in order to be able to decode a coproduct but none exists for ${Format.TypeOf[T]} (row type: ${Format.TypeOf[ResultRow]}). " + + s"Need a column resolver in order to be able to decode a coproduct but none exists for ${Format + .TypeOf[T]} (row type: ${Format.TypeOf[ResultRow]}). " + s"\nHave you extended a MirrorContext and made sure to `import ctx.{given, _}`." + s"\nOtherwise a failure will occur with the encoder at runtime" ) @@ -271,26 +332,45 @@ object DecodeSum { case Some(rowTyper) => val rowTypeClassTag = '{ $rowTyper($resultRow) } // then go through the elementTypes and match the one that the rowClass refers to. Then decode it (i.e. recurse on the GenericDecoder with it) - selectMatchingElementAndDecode[ElementTypes, ResultRow, Session, T](index, baseIndex, resultRow, session, rowTypeClassTag)(Type.of[ElementTypes]) + selectMatchingElementAndDecode[ElementTypes, ResultRow, Session, T]( + index, + baseIndex, + resultRow, + session, + rowTypeClassTag + )(Type.of[ElementTypes]) case None => // Technically this should be an error but if I make it into one, the user will have zero feedback as to what is going on and // the output will be "Decoder could not be summoned during query execution". At least in this situation // the user actually has actionable information on how to resolve the problem. - report.warning(s"Need a RowTyper for ${Format.TypeOf[T]}. Have you implemented a RowTyper for it? Otherwise the decoder will fail at runtime if this type is encountered") + report.warning( + s"Need a RowTyper for ${Format.TypeOf[T]}. Have you implemented a RowTyper for it?
Otherwise the decoder will fail at runtime if this type is encountered" + ) val msg = Expr(s"Cannot summon RowTyper for type: ${Format.TypeOf[T]}") FlattenData(Type.of[T], '{ throw new IllegalArgumentException($msg) }, '{ false }, 0) } } } - /** Find a type from a coproduct type that matches a given ClassTag, if it matches, summon a decoder for it and decode it */ - def selectMatchingElementAndDecode[Types: Type, ResultRow: Type, Session: Type, T: Type](index: Int, rawIndex: Expr[Int], resultRow: Expr[ResultRow], session: Expr[Session], rowTypeClassTag: Expr[ClassTag[_]])(typesTup: Type[Types])(using - Quotes + /** + * Find a type from a coproduct type that matches a given ClassTag, if it + * matches, summon a decoder for it and decode it + */ + def selectMatchingElementAndDecode[Types: Type, ResultRow: Type, Session: Type, T: Type]( + index: Int, + rawIndex: Expr[Int], + resultRow: Expr[ResultRow], + session: Expr[Session], + rowTypeClassTag: Expr[ClassTag[_]] + )(typesTup: Type[Types])(using + Quotes ): FlattenData = { import quotes.reflect._ typesTup match { case ('[tpe *: types]) => - println(s"(Co-Product) Checking if ${Format.TypeOf[tpe]} == ${Format.Expr(rowTypeClassTag)} and should be spliced into index: ${index}") + println( + s"(Co-Product) Checking if ${Format.TypeOf[tpe]} == ${Format.Expr(rowTypeClassTag)} and should be spliced into index: ${index}" + ) val possibleElementClass = Expr.summon[ClassTag[tpe]] match { case Some(cls) => '{ $cls.runtimeClass } @@ -302,11 +382,18 @@ object DecodeSum { // case class Square(length: Int, width: Int) extends Shape // case class Circle(radius: Int) extends Shape // In that case `tpe` here will be Square/Circle - val thisElementDecoder = GenericDecoder.decode[tpe, ResultRow, Session](index, rawIndex, resultRow, session).decodedExpr + val thisElementDecoder = + GenericDecoder.decode[tpe, ResultRow, Session](index, rawIndex, resultRow, session).decodedExpr val thisElementNullChecker = '{ !${ Summon.nullChecker[ResultRow, Session](rawIndex, resultRow) } } // make the recursive call - val nextData = selectMatchingElementAndDecode[types, ResultRow, Session, T](index + 1, rawIndex, resultRow, session, rowTypeClassTag)(Type.of[types]) + val nextData = selectMatchingElementAndDecode[types, ResultRow, Session, T]( + index + 1, + rawIndex, + resultRow, + session, + rowTypeClassTag + )(Type.of[types]) val rowTypeClass = '{ $rowTypeClassTag.runtimeClass } val decodedElement = @@ -338,7 +425,7 @@ object ConstructDecoded { def apply[T: Type](types: List[Type[_]], terms: List[Expr[_]], m: Expr[Mirror.ProductOf[T]])(using Quotes) = { import quotes.reflect._ // Get the constructor - val tpe = TypeRepr.of[T] + val tpe = TypeRepr.of[T] val constructor = TypeRepr.of[T].typeSymbol.primaryConstructor // If we are a tuple, we can easily construct it if (tpe <:< TypeRepr.of[Tuple]) { @@ -357,7 +444,9 @@ object ConstructDecoded { // println(s"=========== Create from Tuple Constructor ${Format.Expr(construct.asExprOf[T])} ===========") construct.asExprOf[T] // If we are a case class with no generic parameters, we can easily construct it - } else if (tpe.classSymbol.exists(_.flags.is(Flags.Case)) && !constructor.paramSymss.exists(_.exists(_.isTypeParam))) { + } else if ( + tpe.classSymbol.exists(_.flags.is(Flags.Case)) && !constructor.paramSymss.exists(_.exists(_.isTypeParam)) + ) { val construct = Apply( Select(New(TypeTree.of[T]), constructor), diff --git a/quill-sql/src/main/scala/io/getquill/generic/GenericEncoder.scala 
b/quill-sql/src/main/scala/io/getquill/generic/GenericEncoder.scala index 084c5c2de..1d8820fb5 100644 --- a/quill-sql/src/main/scala/io/getquill/generic/GenericEncoder.scala +++ b/quill-sql/src/main/scala/io/getquill/generic/GenericEncoder.scala @@ -12,9 +12,10 @@ trait GenericEncoder[T, PrepareRow, Session] extends ((Int, T, PrepareRow, Sessi } case class GenericEncoderWithStringFallback[T, PrepareRow, Session]( - nullableEncoder: GenericEncoder[Option[T], PrepareRow, Session], - stringConverter: Either[String, FromString[T]] -)(classTagExpected: ClassTag[T]) extends GenericEncoder[Any, PrepareRow, Session] { + nullableEncoder: GenericEncoder[Option[T], PrepareRow, Session], + stringConverter: Either[String, FromString[T]] +)(classTagExpected: ClassTag[T]) + extends GenericEncoder[Any, PrepareRow, Session] { private def classTagFromInstance(t: Any) = // if the value is just null, use the original encoder, since value conversion shouldn't mater diff --git a/quill-sql/src/main/scala/io/getquill/generic/TupleMember.scala b/quill-sql/src/main/scala/io/getquill/generic/TupleMember.scala index bf4142561..d63c2b74e 100644 --- a/quill-sql/src/main/scala/io/getquill/generic/TupleMember.scala +++ b/quill-sql/src/main/scala/io/getquill/generic/TupleMember.scala @@ -19,14 +19,16 @@ object TupleMember { object ElaboratedField { def apply(tpe: TypeRepr, fieldName: String) = { val typeSymbol = tpe.typeSymbol - typeSymbol.methodMembers.find(m => m.name == fieldName && m.paramSymss == List()).map(ZeroArgsMethod(_)) + typeSymbol.methodMembers + .find(m => m.name == fieldName && m.paramSymss == List()) + .map(ZeroArgsMethod(_)) .orElse(typeSymbol.fieldMembers.find(m => m.name == fieldName).map(Field(_))) .getOrElse(NotFound) } case class ZeroArgsMethod(symbol: Symbol) extends ElaboratedField - case class Field(symbol: Symbol) extends ElaboratedField - case object NotFound extends ElaboratedField + case class Field(symbol: Symbol) extends ElaboratedField + case object NotFound extends ElaboratedField } // end ElaboratedField val clsType = TypeRepr.of[T] @@ -36,9 +38,11 @@ object TupleMember { // val memberSymbol = clsType.typeSymbol.fieldMember("_1") val elab = ElaboratedField(clsType, matchMember) elab match { - case ElaboratedField.ZeroArgsMethod(sym) => report.info(s"${sym} is a zero-args member whose type is ${clsType.widen.memberType(sym).widen}") - case ElaboratedField.Field(sym) => report.info(s"${sym} is a field whose type is ${clsType.widen.memberType(sym).widen}") - case ElaboratedField.NotFound => report.info(s"${matchMember} was not found") + case ElaboratedField.ZeroArgsMethod(sym) => + report.info(s"${sym} is a zero-args member whose type is ${clsType.widen.memberType(sym).widen}") + case ElaboratedField.Field(sym) => + report.info(s"${sym} is a field whose type is ${clsType.widen.memberType(sym).widen}") + case ElaboratedField.NotFound => report.info(s"${matchMember} was not found") } '{ () } diff --git a/quill-sql/src/main/scala/io/getquill/generic/WarnMac.scala b/quill-sql/src/main/scala/io/getquill/generic/WarnMac.scala index b691f8901..5a123b4f0 100644 --- a/quill-sql/src/main/scala/io/getquill/generic/WarnMac.scala +++ b/quill-sql/src/main/scala/io/getquill/generic/WarnMac.scala @@ -9,7 +9,8 @@ object WarnMac { import io.getquill.util.Format msg match { case Expr(str: String) => - println(s"${str} - ${Format.TypeRepr(TypeRepr.of[F])}: ${Format.TypeRepr(TypeRepr.of[T])} -> ${Format.TypeRepr(TypeRepr.of[Tail])}") + println(s"${str} - ${Format.TypeRepr(TypeRepr.of[F])}: 
${Format.TypeRepr(TypeRepr.of[T])} -> ${Format + .TypeRepr(TypeRepr.of[Tail])}") } '{ () } } diff --git a/quill-sql/src/main/scala/io/getquill/idiom/LoadNaming.scala b/quill-sql/src/main/scala/io/getquill/idiom/LoadNaming.scala index 5ca63246c..7077e9511 100644 --- a/quill-sql/src/main/scala/io/getquill/idiom/LoadNaming.scala +++ b/quill-sql/src/main/scala/io/getquill/idiom/LoadNaming.scala @@ -26,21 +26,25 @@ object LoadNaming { for { optClassSymbol <- Try(loadClassType.classSymbol) className <- Try { - optClassSymbol match { - case Some(value) => Success(value.fullName) - case None => - if (!loadClassType.termSymbol.moduleClass.isNoSymbol) - Success(loadClassType.termSymbol.moduleClass.fullName) - else - Failure(new IllegalArgumentException(s"The class ${loadClassType.show} cannot be loaded because it is not a scala class or module")) - } - }.flatten + optClassSymbol match { + case Some(value) => Success(value.fullName) + case None => + if (!loadClassType.termSymbol.moduleClass.isNoSymbol) + Success(loadClassType.termSymbol.moduleClass.fullName) + else + Failure( + new IllegalArgumentException( + s"The class ${loadClassType.show} cannot be loaded because it is not a scala class or module" + ) + ) + } + }.flatten field <- Try { - val clsFull = `endWith$`(className) - val cls = Class.forName(clsFull) - val field = cls.getField("MODULE$") - field.get(cls).asInstanceOf[T] - } + val clsFull = `endWith$`(className) + val cls = Class.forName(clsFull) + val field = cls.getField("MODULE$") + field.get(cls).asInstanceOf[T] + } } yield (field) } @@ -57,7 +61,8 @@ object LoadNaming { treeTpe match { case AppliedType(_, types) => types - .filter(_.isInstanceOf[TypeRepr]).map(_.asInstanceOf[TypeRepr]) + .filter(_.isInstanceOf[TypeRepr]) + .map(_.asInstanceOf[TypeRepr]) .filterNot(_ =:= TypeRepr.of[NamingStrategy]) .filterNot(_ =:= TypeRepr.of[Nothing]) } diff --git a/quill-sql/src/main/scala/io/getquill/metaprog/ExprAccumulate.scala b/quill-sql/src/main/scala/io/getquill/metaprog/ExprAccumulate.scala index a2fa74e93..811d21659 100644 --- a/quill-sql/src/main/scala/io/getquill/metaprog/ExprAccumulate.scala +++ b/quill-sql/src/main/scala/io/getquill/metaprog/ExprAccumulate.scala @@ -6,7 +6,10 @@ import scala.collection.mutable.ArrayBuffer import io.getquill.util.Format import scala.util.Try -/** Remove all instances of SerialHelper.fromSerialized from a tree (for printing purposes) */ +/** + * Remove all instances of SerialHelper.fromSerialized from a tree (for printing + * purposes) + */ object DeserializeAstInstances { def apply[T: Type](input: Expr[T])(using Quotes): Expr[T] = { import quotes.reflect.{Try => _, _} @@ -60,7 +63,9 @@ object DeserializeAstInstances { } // end DeserializeAstInstances object ExprAccumulate { - def apply[T: Type, ExpectedType](input: Expr[Any], recurseWhenMatched: Boolean = true)(matcher: PartialFunction[Expr[Any], T])(using Quotes): List[T] = { + def apply[T: Type, ExpectedType](input: Expr[Any], recurseWhenMatched: Boolean = true)( + matcher: PartialFunction[Expr[Any], T] + )(using Quotes): List[T] = { import quotes.reflect._ val buff: ArrayBuffer[T] = new ArrayBuffer[T]() @@ -71,7 +76,7 @@ object ExprAccumulate { // ============== Could not transform over expression =========== // scala.tasty.reflect.ExprCastError: Expr: ["name" : String] // did not conform to type: String* - override def transformChildren[TF](expr: Expr[TF])(using Type[TF])(using Quotes): Expr[TF] = { + override def transformChildren[TF](expr: Expr[TF])(using Type[TF])(using Quotes): Expr[TF] = try { 
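/**
 * A typical driver of this accumulation machinery is `findUnquotes` further down
 * in this patch, which collects every unquoted QuotationLot vase in an
 * expression tree. Sketch of the call shape:
 * {{{
 * val vases = ExprAccumulate(expr) { case QuotationLotExpr.Unquoted(vase) => vase }
 * }}}
 */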
// If it is a Quat we immediately know it's not a Uprootable (i.e. we have gone too far down the chain) expr match { @@ -97,7 +102,6 @@ object ExprAccumulate { // s"\n===========") expr } - } def isQuat(expr: Expr[_]) = expr.asTerm.tpe <:< TypeRepr.of[io.getquill.quat.Quat] diff --git a/quill-sql/src/main/scala/io/getquill/metaprog/ExprModel.scala b/quill-sql/src/main/scala/io/getquill/metaprog/ExprModel.scala index 1889c34cf..bd460569d 100644 --- a/quill-sql/src/main/scala/io/getquill/metaprog/ExprModel.scala +++ b/quill-sql/src/main/scala/io/getquill/metaprog/ExprModel.scala @@ -43,10 +43,16 @@ class ExprModel {} sealed trait PlanterExpr[T: scala.quoted.Type, PrepareRow: scala.quoted.Type, Session: scala.quoted.Type] { def uid: String def plant(using Quotes): Expr[Planter[T, PrepareRow, Session]] // TODO Change to 'replant' ? - def nestInline(using Quotes)(call: Option[quotes.reflect.Tree], bindings: List[quotes.reflect.Definition]): PlanterExpr[T, PrepareRow, Session] + def nestInline(using + Quotes + )(call: Option[quotes.reflect.Tree], bindings: List[quotes.reflect.Definition]): PlanterExpr[T, PrepareRow, Session] } -case class EagerListPlanterExpr[T, PrepareRow: Type, Session: Type](uid: String, expr: Expr[List[T]], encoder: Expr[GenericEncoder[T, PrepareRow, Session]])(using val tpe: Type[T], queryTpe: Type[Query[T]]) +case class EagerListPlanterExpr[T, PrepareRow: Type, Session: Type]( + uid: String, + expr: Expr[List[T]], + encoder: Expr[GenericEncoder[T, PrepareRow, Session]] +)(using val tpe: Type[T], queryTpe: Type[Query[T]]) extends PlanterExpr[Query[T], PrepareRow, Session] { def plant(using Quotes): Expr[EagerListPlanter[T, PrepareRow, Session]] = '{ EagerListPlanter[T, PrepareRow, Session]($expr, $encoder, ${ Expr(uid) }) } @@ -59,7 +65,11 @@ case class EagerListPlanterExpr[T, PrepareRow: Type, Session: Type](uid: String, } } -case class EagerPlanterExpr[T: Type, PrepareRow: Type, Session: Type](uid: String, expr: Expr[T], encoder: Expr[GenericEncoder[T, PrepareRow, Session]]) extends PlanterExpr[T, PrepareRow, Session] { +case class EagerPlanterExpr[T: Type, PrepareRow: Type, Session: Type]( + uid: String, + expr: Expr[T], + encoder: Expr[GenericEncoder[T, PrepareRow, Session]] +) extends PlanterExpr[T, PrepareRow, Session] { def plant(using Quotes): Expr[EagerPlanter[T, PrepareRow, Session]] = '{ EagerPlanter[T, PrepareRow, Session]($expr, $encoder, ${ Expr(uid) }) } def nestInline(using Quotes)(call: Option[quotes.reflect.Tree], bindings: List[quotes.reflect.Definition]) = { @@ -71,7 +81,11 @@ case class EagerPlanterExpr[T: Type, PrepareRow: Type, Session: Type](uid: Strin } } -case class InjectableEagerPlanterExpr[T: Type, PrepareRow: Type, Session: Type](uid: String, inject: Expr[_ => T], encoder: Expr[GenericEncoder[T, PrepareRow, Session]]) extends PlanterExpr[T, PrepareRow, Session] { +case class InjectableEagerPlanterExpr[T: Type, PrepareRow: Type, Session: Type]( + uid: String, + inject: Expr[_ => T], + encoder: Expr[GenericEncoder[T, PrepareRow, Session]] +) extends PlanterExpr[T, PrepareRow, Session] { def plant(using Quotes): Expr[InjectableEagerPlanter[T, PrepareRow, Session]] = '{ InjectableEagerPlanter[T, PrepareRow, Session]($inject, $encoder, ${ Expr(uid) }) } def inject(injectee: Expr[Any])(using Quotes): Expr[EagerPlanter[T, PrepareRow, Session]] = @@ -85,7 +99,8 @@ case class InjectableEagerPlanterExpr[T: Type, PrepareRow: Type, Session: Type]( } } -case class LazyPlanterExpr[T: Type, PrepareRow: Type, Session: Type](uid: String, expr: Expr[T]) extends 
PlanterExpr[T, PrepareRow, Session] { +case class LazyPlanterExpr[T: Type, PrepareRow: Type, Session: Type](uid: String, expr: Expr[T]) + extends PlanterExpr[T, PrepareRow, Session] { def plant(using Quotes): Expr[LazyPlanter[T, PrepareRow, Session]] = '{ LazyPlanter[T, PrepareRow, Session]($expr, ${ Expr(uid) }) } def nestInline(using Quotes)(call: Option[quotes.reflect.Tree], bindings: List[quotes.reflect.Definition]) = { @@ -95,11 +110,12 @@ case class LazyPlanterExpr[T: Type, PrepareRow: Type, Session: Type](uid: String } case class EagerEntitiesPlanterExpr[T, PrepareRow: Type, Session: Type]( - uid: String, - expr: Expr[Iterable[T]], - fieldGetters: Expr[List[InjectableEagerPlanter[?, PrepareRow, Session]]], - fieldClass: ast.CaseClass -)(using val tpe: Type[T], queryTpe: Type[Query[T]]) extends PlanterExpr[Query[T], PrepareRow, Session] { + uid: String, + expr: Expr[Iterable[T]], + fieldGetters: Expr[List[InjectableEagerPlanter[?, PrepareRow, Session]]], + fieldClass: ast.CaseClass +)(using val tpe: Type[T], queryTpe: Type[Query[T]]) + extends PlanterExpr[Query[T], PrepareRow, Session] { def plant(using Quotes): Expr[EagerEntitiesPlanter[T, PrepareRow, Session]] = { val fieldClassExpr = Lifter.caseClass(fieldClass) '{ EagerEntitiesPlanter[T, PrepareRow, Session]($expr, ${ Expr(uid) }, $fieldGetters, $fieldClassExpr) } @@ -124,12 +140,18 @@ object PlanterExpr { object Uprootable { - /** Match the generic parameters [T, PrepareRow, Session] going into InjectableEagerPlanter[T, PrepareRow, Session] */ + /** + * Match the generic parameters [T, PrepareRow, Session] going into + * InjectableEagerPlanter[T, PrepareRow, Session] + */ object MatchInjectableEager { def unapply(using Quotes)(term: quotes.reflect.Term) = { import quotes.reflect._ term match { - case Apply(TypeApply(Select(Ident("InjectableEagerPlanter"), "apply"), List(qtType, prepType, sessionType)), List(liftValue, encoder, Literal(StringConstant(uid)))) => + case Apply( + TypeApply(Select(Ident("InjectableEagerPlanter"), "apply"), List(qtType, prepType, sessionType)), + List(liftValue, encoder, Literal(StringConstant(uid))) + ) => Option((qtType, prepType, sessionType, liftValue, encoder, uid)) case _ => None } @@ -140,10 +162,26 @@ object PlanterExpr { import quotes.reflect._ // underlyingArgument application is needed on expr otherwise the InjectableEagerPlanter matchers won't work no mater how you configure them UntypeExpr(expr.asTerm.underlyingArgument.asExpr) match { - case Is[EagerPlanter[_, _, _]]('{ EagerPlanter.apply[qt, prep, session]($liftValue, $encoder, ${ Expr(uid: String) }) }) => - Some(EagerPlanterExpr[qt, prep, session](uid, liftValue, encoder /* .asInstanceOf[Expr[GenericEncoder[A, A]]] */ ).asInstanceOf[PlanterExpr[_, _, _]]) - case Is[EagerListPlanter[_, _, _]]('{ EagerListPlanter.apply[qt, prep, session]($liftValue, $encoder, ${ Expr(uid: String) }) }) => - Some(EagerListPlanterExpr[qt, prep, session](uid, liftValue, encoder /* .asInstanceOf[Expr[GenericEncoder[A, A]]] */ ).asInstanceOf[PlanterExpr[_, _, _]]) + case Is[EagerPlanter[_, _, _]]('{ + EagerPlanter.apply[qt, prep, session]($liftValue, $encoder, ${ Expr(uid: String) }) + }) => + Some( + EagerPlanterExpr[qt, prep, session]( + uid, + liftValue, + encoder /* .asInstanceOf[Expr[GenericEncoder[A, A]]] */ + ).asInstanceOf[PlanterExpr[_, _, _]] + ) + case Is[EagerListPlanter[_, _, _]]('{ + EagerListPlanter.apply[qt, prep, session]($liftValue, $encoder, ${ Expr(uid: String) }) + }) => + Some( + EagerListPlanterExpr[qt, prep, session]( + uid, + 
liftValue, + encoder /* .asInstanceOf[Expr[GenericEncoder[A, A]]] */ + ).asInstanceOf[PlanterExpr[_, _, _]] + ) // If you uncomment this instead of '{ InjectableEagerPlanter.apply... it will also work but expr.asTerm.underlyingArgument.asExpr on top is needed // case Unseal(Inlined(call, defs, MatchInjectableEager(qtType, prepType, liftValue, encoder, uid))) => @@ -160,7 +198,13 @@ object PlanterExpr { case ('[qtt], '[prep], '[session]) => encoder.tpe.asType match { case '[enc] => - Some(InjectableEagerPlanterExpr[qtt, prep, session](uid, liftValue.asExpr.asInstanceOf[Expr[_ => qtt]], encoder.asExpr.asInstanceOf[Expr[enc & GenericEncoder[qtt, prep, session]]])) + Some( + InjectableEagerPlanterExpr[qtt, prep, session]( + uid, + liftValue.asExpr.asInstanceOf[Expr[_ => qtt]], + encoder.asExpr.asInstanceOf[Expr[enc & GenericEncoder[qtt, prep, session]]] + ) + ) } } @@ -169,14 +213,27 @@ object PlanterExpr { case Is[LazyPlanter[_, _, _]]('{ LazyPlanter.apply[qt, prep, session]($liftValue, ${ Expr(uid: String) }) }) => Some(LazyPlanterExpr[qt, prep, session](uid, liftValue).asInstanceOf[PlanterExpr[_, _, _]]) - case Is[EagerEntitiesPlanter[_, _, _]]('{ EagerEntitiesPlanter.apply[qt, prep, session]($liftValue, ${ Expr(uid: String) }, $fieldGetters, ${ Unlifter.ast(fieldClassAst) }) }) => + case Is[EagerEntitiesPlanter[_, _, _]]('{ + EagerEntitiesPlanter.apply[qt, prep, session]( + $liftValue, + ${ Expr(uid: String) }, + $fieldGetters, + ${ Unlifter.ast(fieldClassAst) } + ) + }) => val fieldClass = fieldClassAst match { case cc: ast.CaseClass => cc case _ => - report.throwError(s"Found wrong type when unlifting liftQuery class. Expected a case class, was: ${io.getquill.util.Messages.qprint(fieldClassAst)}") + report.throwError( + s"Found wrong type when unlifting liftQuery class. Expected a case class, was: ${io.getquill.util.Messages + .qprint(fieldClassAst)}" + ) } - Some(EagerEntitiesPlanterExpr[qt, prep, session](uid, liftValue, fieldGetters, fieldClass).asInstanceOf[EagerEntitiesPlanterExpr[_, _, _]]) + Some( + EagerEntitiesPlanterExpr[qt, prep, session](uid, liftValue, fieldGetters, fieldClass) + .asInstanceOf[EagerEntitiesPlanterExpr[_, _, _]] + ) case other => None } @@ -236,8 +293,8 @@ object PlanterExpr { Some(vaseExpr) case other => None - }.collect { - case Some(value) => value + }.collect { case Some(value) => + value } // if all the elements match SingleValueVase then return them, otherwise don't @@ -254,7 +311,10 @@ object PlanterExpr { case class QuotedExpr(ast: Expr[Ast], lifts: Expr[List[Planter[_, _, _]]], runtimeQuotes: Expr[List[QuotationVase]]) object QuotedExpr { - /** To be used internally only since it does not account for inlines that could appear in front of it */ + /** + * To be used internally only since it does not account for inlines that could + * appear in front of it + */ private object `Quoted.apply` { def unapply(expr: Expr[Any])(using Quotes): Option[QuotedExpr] = { import quotes.reflect.{Term => QTerm, _} @@ -278,7 +338,11 @@ object QuotedExpr { It is possible that there are inlines, if so they cannot be in the AST since that is re-syntheized on every quote call so any references they use have to be in the lifts/runtimeQuotes. 
If it is Uprootable there are no runtimeQuotes so we just have to do the nesting in the */ - case SealedInline(parent, defs, `Quoted.apply`(quotedExpr @ QuotedExpr(ast, PlanterExpr.UprootableList(lifts), _))) => + case SealedInline( + parent, + defs, + `Quoted.apply`(quotedExpr @ QuotedExpr(ast, PlanterExpr.UprootableList(lifts), _)) + ) => val nestInlineLifts = lifts.map(_.nestInline(parent, defs)) Some((quotedExpr, nestInlineLifts)) case `Quoted.apply`(quotedExpr @ QuotedExpr(ast, PlanterExpr.UprootableList(lifts), _)) => @@ -304,7 +368,11 @@ sealed trait QuotationLotExpr object QuotationLotExpr { def apply(expr: Expr[Any])(using Quotes): QuotationLotExpr = - unapply(expr).getOrElse { quotes.reflect.report.throwError(s"The expression: ${expr.show} is not a valid Quoted Expression and cannot be unquoted.") } + unapply(expr).getOrElse { + quotes.reflect.report.throwError( + s"The expression: ${expr.show} is not a valid Quoted Expression and cannot be unquoted." + ) + } // Verify that a quotation is inline. It is inline if all the lifts are inline. There is no need // to search the AST since it has been parsed already @@ -355,7 +423,8 @@ object QuotationLotExpr { } /** - * Match all of the different kinds of QuotationLots and unpack their contents. + * Match all of the different kinds of QuotationLots and unpack their + * contents. */ protected object `QuotationLot.apply` { @@ -413,9 +482,8 @@ object QuotationLotExpr { object findUnquotes { def apply(expr: Expr[Any])(using Quotes) = - ExprAccumulate(expr) { - case QuotationLotExpr.Unquoted(vaseExpr) => - vaseExpr + ExprAccumulate(expr) { case QuotationLotExpr.Unquoted(vaseExpr) => + vaseExpr } } @@ -423,7 +491,9 @@ object QuotationLotExpr { def apply(expr: Expr[Any])(using Quotes): QuotationLotExpr = { import quotes.reflect._ unapply(expr).getOrElse { - quotes.reflect.report.throwError(s"The expression: ${Format(Printer.TreeShortCode.show(expr.asTerm))} is not a valid unquotation of a Quoted Expression (i.e. a [quoted-expression].unqoute) and cannot be unquoted.") + quotes.reflect.report.throwError( + s"The expression: ${Format(Printer.TreeShortCode.show(expr.asTerm))} is not a valid unquotation of a Quoted Expression (i.e. a [quoted-expression].unquote) and cannot be unquoted." + ) } } @@ -459,10 +529,11 @@ object QuotationLotExpr { case class Pointable(expr: scala.quoted.Expr[QuotationLot[Any]]) extends QuotationLotExpr /** - * QuotationLots that have runtime values hance cannot be re-planted into the scala AST and - * they need to be put into QuotationVasees. - * The 'other' argument is meant to be used in various unique circumstances. Right now it - * is just used by a QueryMeta to carry an extractor function that contra-maps back to the T type + * QuotationLots that have runtime values hence cannot be re-planted into the + * scala AST and they need to be put into QuotationVases. The 'other' + * argument is meant to be used in various unique circumstances.
Right now it + is just used by a QueryMeta to carry an extractor function that contra-maps + * back to the T type */ case class Pluckable(uid: String, expr: Expr[Quoted[Any]], other: List[Expr[_]]) extends QuotationLotExpr { def pluck(using Quotes) = @@ -501,13 +572,13 @@ object QuotationLotExpr { // } case class Uprootable( - uid: String, - ast: Expr[Ast], - inlineLifts: List[PlanterExpr[_, _, _]] + uid: String, + ast: Expr[Ast], + inlineLifts: List[PlanterExpr[_, _, _]] )( - val quotation: Expr[Quoted[Any]], - val bin: Expr[QuotationLot[Any]], - val extra: List[Expr[_]] + val quotation: Expr[Quoted[Any]], + val bin: Expr[QuotationLot[Any]], + val extra: List[Expr[_]] ) extends QuotationLotExpr object Uprootable { diff --git a/quill-sql/src/main/scala/io/getquill/metaprog/Extractors.scala b/quill-sql/src/main/scala/io/getquill/metaprog/Extractors.scala index 056115505..1a2d0e27a 100644 --- a/quill-sql/src/main/scala/io/getquill/metaprog/Extractors.scala +++ b/quill-sql/src/main/scala/io/getquill/metaprog/Extractors.scala @@ -19,7 +19,7 @@ object Extractors { inline def typeName[T]: String = ${ typeNameImpl[T] } def typeNameImpl[T: Type](using Quotes): Expr[String] = { import quotes.reflect._ - val tpe = TypeRepr.of[T] + val tpe = TypeRepr.of[T] val name: String = tpe.classSymbol.get.name Expr(name) } @@ -53,7 +53,9 @@ object Extractors { } object Term { - def unapply(using Quotes)(term: quotes.reflect.Term): Option[(quotes.reflect.Term, String, List[quotes.reflect.Term])] = { + def unapply(using + Quotes + )(term: quotes.reflect.Term): Option[(quotes.reflect.Term, String, List[quotes.reflect.Term])] = { import quotes.reflect._ term match { // case Apply(Select(body, method), args) => Some((body, method, args)) @@ -66,9 +68,9 @@ object Extractors { } /** - * Matches predicate(bar) or predicate[T](bar) - * where predicate can be a simple method or something selected from something else e.g: - * foo.method(bar) or foo.method[T](bar) + * Matches predicate(bar) or predicate[T](bar) where predicate can be a simple + * method or something selected from something else e.g.: foo.method(bar) or + * foo.method[T](bar) */ object Applys { def unapply(using Quotes)(term: quotes.reflect.Term) = { @@ -138,13 +140,14 @@ object Extractors { } /** - * Ignore case where there happens to be an apply e.g. java functions where "str".length in scala - * will translate into "str".lenth() since for java - * methods () is automatically added in. - * Hence it's `Apply( Select(Literal(IntConstant("str")), "length") )` - * Not just `Select(Literal(IntConstant("str")), "length")` + * Ignore case where there happens to be an apply e.g. java functions where + * "str".length in scala will translate into "str".length() since for java + * methods () is automatically added in. Hence it's `Apply( + * Select(Literal(IntConstant("str")), "length") )` Not just + * `Select(Literal(IntConstant("str")), "length")` * - * Note maybe there's even a case where you want multiple empty-applies e.g. foo()() to be ignored - * hence this would be done recursively like `Untype` + * Note maybe there's even a case where you want multiple empty-applies e.g.
+ * foo()() to be ignored hence this would be done recursively like `Untype` */ object IgnoreApplyNoargs { def unapply(using Quotes)(term: quotes.reflect.Term): Option[quotes.reflect.Term] = { @@ -208,7 +211,8 @@ object Extractors { val cls = tpe.widen.typeSymbol if (!cls.flags.is(Flags.Case)) report.throwError( - s"The class ${Format.TypeRepr(expr.asTerm.tpe)} (symbol: ${cls}) is not a case class in the expression: ${Format.Expr(expr)}\n" + + s"The class ${Format.TypeRepr(expr.asTerm.tpe)} (symbol: ${cls}) is not a case class in the expression: ${Format + .Expr(expr)}\n" + s"Therefore you cannot lookup the property `${property}` on it!" ) else { @@ -216,7 +220,10 @@ object Extractors { cls.caseFields .find(sym => sym.name == property) .getOrElse { - report.throwError(s"Cannot find property '${property}' of (${expr.show}:${cls.name}) fields are: ${cls.caseFields.map(_.name)}", expr) + report.throwError( + s"Cannot find property '${property}' of (${expr.show}:${cls.name}) fields are: ${cls.caseFields.map(_.name)}", + expr + ) } '{ (${ Select(expr.asTerm, method).asExpr }) } @@ -242,7 +249,9 @@ object Extractors { // TODO I like this pattern of doing 'Term' in a sub-object should do more of this in future object Term { - def unapply(using Quotes)(term: quotes.reflect.Term): Option[(String, quotes.reflect.TypeRepr, quotes.reflect.Term)] = { + def unapply(using + Quotes + )(term: quotes.reflect.Term): Option[(String, quotes.reflect.TypeRepr, quotes.reflect.Term)] = { import quotes.reflect._ Untype(term) match { case Lambda(List(ValDef(ident, tpeTree, _)), methodBody) => Some((ident, tpeTree.tpe, methodBody)) @@ -254,29 +263,36 @@ object Extractors { } object Lambda2 { - def unapply(using Quotes)(expr: Expr[_]): Option[(String, quotes.reflect.TypeRepr, String, quotes.reflect.TypeRepr, quoted.Expr[_])] = { + def unapply(using + Quotes + )(expr: Expr[_]): Option[(String, quotes.reflect.TypeRepr, String, quotes.reflect.TypeRepr, quoted.Expr[_])] = { import quotes.reflect._ unapplyTerm(expr.asTerm).map((str1, tpe1, str2, tpe2, expr) => (str1, tpe1, str2, tpe2, expr.asExpr)) } - def unapplyTerm(using Quotes)(term: quotes.reflect.Term): Option[(String, quotes.reflect.TypeRepr, String, quotes.reflect.TypeRepr, quotes.reflect.Term)] = { + def unapplyTerm(using Quotes)( + term: quotes.reflect.Term + ): Option[(String, quotes.reflect.TypeRepr, String, quotes.reflect.TypeRepr, quotes.reflect.Term)] = { import quotes.reflect._ Untype(term) match { - case Lambda(List(ValDef(ident1, tpe1, _), ValDef(ident2, tpe2, _)), methodBody) => Some((ident1, tpe1.tpe, ident2, tpe2.tpe, methodBody)) - case Block(List(), expr) => unapplyTerm(expr) - case _ => None + case Lambda(List(ValDef(ident1, tpe1, _), ValDef(ident2, tpe2, _)), methodBody) => + Some((ident1, tpe1.tpe, ident2, tpe2.tpe, methodBody)) + case Block(List(), expr) => unapplyTerm(expr) + case _ => None } } } object RawLambdaN { - def unapply(using Quotes)(term: quotes.reflect.Term): Option[(List[(String, quotes.reflect.TypeRepr)], quotes.reflect.Term)] = { + def unapply(using + Quotes + )(term: quotes.reflect.Term): Option[(List[(String, quotes.reflect.TypeRepr)], quotes.reflect.Term)] = { import quotes.reflect._ Untype(term) match { case Lambda(valDefs, methodBody) => val idents = - valDefs.map { - case ValDef(ident, typeTree, u) => (ident, typeTree.tpe) + valDefs.map { case ValDef(ident, typeTree, u) => + (ident, typeTree.tpe) } Some((idents, methodBody)) @@ -378,13 +394,13 @@ object Extractors { } def unapply(using Quotes)(term: quotes.reflect.Tree): 
Option[quotes.reflect.Tree] = Some(recurse(term)) - def apply(using Quotes)(term: quotes.reflect.Tree) = UntypeTree.unapply(term).get + def apply(using Quotes)(term: quotes.reflect.Tree) = UntypeTree.unapply(term).get } /** Summon a named method from the context Context[D, N] */ def summonContextMethod(using Quotes)(name: String, ctx: Expr[_]) = { import quotes.reflect._ - val ctxTerm = ctx.asTerm + val ctxTerm = ctx.asTerm val ctxClass = ctxTerm.tpe.widen.classSymbol.get ctxClass.declaredMethods.filter(f => f.name == name).headOption.getOrElse { throw new IllegalArgumentException(s"Cannot find method '${name}' from context ${ctx.asTerm.tpe.widen}") } @@ -527,9 +543,8 @@ object Extractors { } // end Uncast /** - * Matches `case class Person(first: String, last: String)` creation of the forms: - * Person("Joe","Bloggs") - * new Person("Joe","Bloggs") + * Matches `case class Person(first: String, last: String)` creation of the + * forms: Person("Joe","Bloggs") new Person("Joe","Bloggs") */ object CaseClassCreation { // For modules, the _ in Select could be a couple of things (say the class is Person): def unapply(using Quotes)(term: quotes.reflect.Term) = { import quotes.reflect._ term match { - case Apply(Select(New(TypeIdent(moduleType)), ""), list) if (list.length == 0) && moduleType.endsWith("$") => true - case Select(This(outerClass), name) => true - case Ident(name) => true - case _ => false + case Apply(Select(New(TypeIdent(moduleType)), ""), list) + if (list.length == 0) && moduleType.endsWith("$") => + true + case Select(This(outerClass), name) => true + case Ident(name) => true + case _ => false } } } @@ -579,7 +596,8 @@ object Extractors { Some((sym.name, sym.caseFields.map(_.name), args.map(_.asExpr))) case ClassSymbolAndUnseal(sym, Apply(Select(New(TypeIdent(_)), ""), args)) if isType[Product](expr) => Some((sym.name, sym.caseFields.map(_.name), args.map(_.asExpr))) - case ClassSymbolAndUnseal(sym, Apply(Select(ModuleCreation(), "apply"), args)) if isType[Product](expr) => // && sym.flags.is(Flags.Case) + case ClassSymbolAndUnseal(sym, Apply(Select(ModuleCreation(), "apply"), args)) + if isType[Product](expr) => // && sym.flags.is(Flags.Case) Some((sym.name, sym.caseFields.map(_.name), args.map(_.asExpr))) case _ => None @@ -627,10 +645,11 @@ object Extractors { isNumeric(tpe) && isPrimitive(tpe) /** - * Check whether one numeric `from` can be primitively assigned to a variable of another `into` - * i.e. short can fit into a int, int can fit into a long. Same with float into a double. - * This is used to determine what can be assigned into what (e.g. in a insert(_.age -> 4.toShort) statement) - * and still be considered a valid transpilation. + * Check whether one numeric `from` can be primitively assigned to a variable + * of another `into` i.e. short can fit into an int, int can fit into a long. + * Same with float into a double. This is used to determine what can be + * assigned into what (e.g. in an insert(_.age -> 4.toShort) statement) and + * still be considered a valid transpilation.
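* For example (argument order is `(into, from)`; illustrative, per the rules above):
* {{{
* numericPrimitiveFitsInto(TypeRepr.of[Int], TypeRepr.of[Short]) // true: a Short fits into an Int
* numericPrimitiveFitsInto(TypeRepr.of[Int], TypeRepr.of[Long])  // false: a Long does not fit into an Int
* }}}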
*/ def numericPrimitiveFitsInto(using Quotes)(into: quotes.reflect.TypeRepr, from: quotes.reflect.TypeRepr) = { import quotes.reflect._ @@ -678,8 +697,8 @@ } /** - * Uninline the term no matter what (TODO should reove the unapply case) that pattern always matches - * and is too confusing + * Uninline the term no matter what (TODO should remove the unapply case) that + * pattern always matches and is too confusing */ object Uninline { def unapply[T: Type](using Quotes)(any: Expr[T]): Option[Expr[T]] = { @@ -700,7 +719,9 @@ // case i @ Inlined(_, pv, v) => if (SummonTranspileConfig.summonTraceTypes(true).contains(TraceType.Meta)) - report.warning(s"Ran into an inline on a clause: ${Format(Printer.TreeStructure.show(i.underlyingArgument))}. Proxy variables will be discarded: ${pv}") + report.warning( + s"Ran into an inline on a clause: ${Format(Printer.TreeStructure.show(i.underlyingArgument))}. Proxy variables will be discarded: ${pv}" + ) v.underlyingArgument case _ => any } @@ -711,20 +732,22 @@ object ConstExpr { /** - * Matches expressions containing literal constant values and extracts the value. + * Matches expressions containing literal constant values and extracts the + * value. * - * - Converts expression containg literal values to their values: - * - `'{1}` -> `1`, `'{2}` -> `2`, ... - * - For all primitive types and `String` + * - Converts expression containing literal values to their values: + * - `'{1}` -> `1`, `'{2}` -> `2`, ... + * - For all primitive types and `String` * - * Usage: - * ``` - * case '{ ... ${expr @ ConstExpr(value)}: T ...} => - * // expr: Expr[T] - * // value: T - * ``` + * Usage: + * ``` + * case '{ ... ${expr @ ConstExpr(value)}: T ...} => + * // expr: Expr[T] + * // value: T + * ``` * - * To directly unlift an expression `expr: Expr[T]` consider using `expr.unlift`/`expr.unliftOrError` insead. + * To directly unlift an expression `expr: Expr[T]` consider using + * `expr.unlift`/`expr.unliftOrError` instead. */ def unapply[T](expr: Expr[T])(using Quotes): Option[T] = { import quotes.reflect._ @@ -745,27 +768,27 @@ } } - def nestInline(using Quotes)(call: Option[quotes.reflect.Tree], defs: List[quotes.reflect.Definition])(expr: Expr[_]): Expr[_] = { + def nestInline(using + Quotes + )(call: Option[quotes.reflect.Tree], defs: List[quotes.reflect.Definition])(expr: Expr[_]): Expr[_] = { import quotes.reflect._ Inlined(call, defs, expr.asTerm).asExpr } /** - * Since things like the QueryParser slow are because Quoted matching is slow (or at least slower then I'd like them to be), - * a simple performance optimization is to check if there's a single-method being matched and if so, what is it's name. - * Since Scala matches unapply causes left-to-right (nested and recursively), we can add a unapply clause - * that will grab the name of the method (if it is a single one being matched which in most cases of the - * QueryParser is exaclty what we're looking for) and then match it to a name that we expect it to have. - * For example, if we're trying to match this: - * {{ - * case '{ ($o: Option[t]).map(${Lambda1(id, idType, body)}) } => - * }} - * We can do the following: - * {{ - * case "map" -@> '{ ($o: Option[t]).map(${Lambda1(id, idType, body)}) } => - * }} - * This will check that there's a `Apply(TypeApply(Select(_, "map"), _), _)` being called - * and then only proceecd into the quoted-matcher if that is the case.
+ * Since things like the QueryParser are slow because Quoted matching is slow + * (or at least slower than I'd like it to be), a simple performance + * optimization is to check if there's a single-method being matched and if + * so, what its name is. Since Scala matches unapply clauses left-to-right + * (nested and recursively), we can add an unapply clause that will grab the + * name of the method (if it is a single one being matched, which in most cases + * of the QueryParser is exactly what we're looking for) and then match it to + * a name that we expect it to have. For example, if we're trying to match + * this: {{ case '{ ($o: Option[t]).map(${Lambda1(id, idType, body)}) } => }} + * We can do the following: {{ case "map" -@> '{ ($o: + * Option[t]).map(${Lambda1(id, idType, body)}) } => }} This will check that + * there's an `Apply(TypeApply(Select(_, "map"), _), _)` being called and then + * only proceed into the quoted-matcher if that is the case. */ object MatchingOptimizers { object --> { diff --git a/quill-sql/src/main/scala/io/getquill/metaprog/SummonParser.scala b/quill-sql/src/main/scala/io/getquill/metaprog/SummonParser.scala index 0c01714f6..9a24f4fa4 100644 --- a/quill-sql/src/main/scala/io/getquill/metaprog/SummonParser.scala +++ b/quill-sql/src/main/scala/io/getquill/metaprog/SummonParser.scala @@ -12,7 +12,10 @@ import io.getquill.parser.SerializeAst object SummonSerializationBehaviors { import scala.quoted._ - /** Summon any serialization behavior defined on the context. If it does not exist return None */ + /** + * Summon any serialization behavior defined on the context. If it does not + * exist return None + */ def apply()(using Quotes): (Option[SerializeQuat], Option[SerializeAst]) = { import quotes.reflect._ // Find a SerializationBehavior and try to unlift it @@ -40,7 +43,10 @@ object SummonParser { case '[t] => Load.Module[t] match { case Success(parser) => parser.asInstanceOf[ParserFactory] - case Failure(e) => report.throwError(s"Could not summon a parser of type ${Format.TypeOf[t]}. A parser must be a static object created in it's own compilation unit. ${e}") + case Failure(e) => + report.throwError( + s"Could not summon a parser of type ${Format.TypeOf[t]}. A parser must be a static object created in its own compilation unit.
${e}" + ) } } case None => diff --git a/quill-sql/src/main/scala/io/getquill/metaprog/SummonTranspileConfig.scala b/quill-sql/src/main/scala/io/getquill/metaprog/SummonTranspileConfig.scala index 191516b5a..9ac9b1b6d 100644 --- a/quill-sql/src/main/scala/io/getquill/metaprog/SummonTranspileConfig.scala +++ b/quill-sql/src/main/scala/io/getquill/metaprog/SummonTranspileConfig.scala @@ -23,9 +23,9 @@ object SummonTranspileConfig { def apply()(using Quotes): TranspileConfig = { import quotes.reflect._ - val traceTypes = summonTraceTypes() + val traceTypes = summonTraceTypes() val disabledPhases = summonPhaseDisables() - val conf = TranspileConfig(disabledPhases, TraceConfig(traceTypes)) + val conf = TranspileConfig(disabledPhases, TraceConfig(traceTypes)) // report.info(conf.toString) conf } @@ -62,8 +62,8 @@ object SummonTranspileConfig { def findHListMembers(baseExpr: Expr[_], typeMemberName: String)(using Quotes): List[quotes.reflect.TypeRepr] = { import quotes.reflect._ - val memberSymbol = baseExpr.asTerm.tpe.termSymbol.memberType(typeMemberName) - val hlistType = baseExpr.asTerm.select(memberSymbol).tpe.widen + val memberSymbol = baseExpr.asTerm.tpe.termSymbol.memberType(typeMemberName) + val hlistType = baseExpr.asTerm.select(memberSymbol).tpe.widen val extractedTypes = recurseConfigList(hlistType.asType) extractedTypes.map { case '[t] => TypeRepr.of[t] }.toList } @@ -78,7 +78,9 @@ object SummonTranspileConfig { case '[head :: tail] => Type.of[head] :: recurseConfigList(Type.of[tail]) case _ => - report.throwError(s"Invalid config list member type: ${Format.Type(listMember)}. Need to be either :: or HNil types.") + report.throwError( + s"Invalid config list member type: ${Format.Type(listMember)}. Need to be either :: or HNil types." + ) } } @@ -98,8 +100,8 @@ private[getquill] object TranspileConfigLiftable { import io.getquill.util.Messages.TraceType given liftOptionalPhase: Lifters.Plain[OptionalPhase] with { - def lift = { - case OptionalPhase.ApplyMap => '{ OptionalPhase.ApplyMap } + def lift = { case OptionalPhase.ApplyMap => + '{ OptionalPhase.ApplyMap } } } @@ -131,14 +133,14 @@ private[getquill] object TranspileConfigLiftable { } given liftTraceConfig: Lifters.Plain[TraceConfig] with { - def lift = { - case TraceConfig(enabledTraces) => '{ io.getquill.util.TraceConfig(${ enabledTraces.expr }) } + def lift = { case TraceConfig(enabledTraces) => + '{ io.getquill.util.TraceConfig(${ enabledTraces.expr }) } } } given liftTranspileConfig: Lifters.Plain[TranspileConfig] with { - def lift = { - case TranspileConfig(disablePhases, traceConfig) => '{ io.getquill.norm.TranspileConfig(${ disablePhases.expr }, ${ traceConfig.expr }) } + def lift = { case TranspileConfig(disablePhases, traceConfig) => + '{ io.getquill.norm.TranspileConfig(${ disablePhases.expr }, ${ traceConfig.expr }) } } } diff --git a/quill-sql/src/main/scala/io/getquill/metaprog/etc/ColumnsFlicer.scala b/quill-sql/src/main/scala/io/getquill/metaprog/etc/ColumnsFlicer.scala index d16b496df..f0d238168 100644 --- a/quill-sql/src/main/scala/io/getquill/metaprog/etc/ColumnsFlicer.scala +++ b/quill-sql/src/main/scala/io/getquill/metaprog/etc/ColumnsFlicer.scala @@ -16,10 +16,13 @@ import io.getquill.generic.ElaborateStructure.{TermType, Leaf, Branch} import io.getquill.generic.ElaborateStructure.UdtBehavior object ColumnsFlicer { - inline def apply[T, PrepareRow, Session](inline entity: T, inline columns: List[String]): T = ${ applyImpl[T, PrepareRow, Session]('entity, 'columns) } - private def applyImpl[T: Type, PrepareRow: 
Type, Session: Type](entity: Expr[T], columns: Expr[List[String]])(using Quotes): Expr[T] = { - new ColumnsFlicerMacro().base[T, PrepareRow, Session](entity, columns) + inline def apply[T, PrepareRow, Session](inline entity: T, inline columns: List[String]): T = ${ + applyImpl[T, PrepareRow, Session]('entity, 'columns) } + private def applyImpl[T: Type, PrepareRow: Type, Session: Type](entity: Expr[T], columns: Expr[List[String]])(using + Quotes + ): Expr[T] = + new ColumnsFlicerMacro().base[T, PrepareRow, Session](entity, columns) } class ColumnsFlicerMacro { @@ -29,8 +32,10 @@ class ColumnsFlicerMacro { } private def recurse[T, PrepareRow, Session, Fields, Types](using - Quotes - )(id: quotes.reflect.Term, fieldsTup: Type[Fields], typesTup: Type[Types])(columns: Expr[List[String]])(using baseType: Type[T], pr: Type[PrepareRow], sess: Type[Session]): List[(Type[_], Expr[_])] = { + Quotes + )(id: quotes.reflect.Term, fieldsTup: Type[Fields], typesTup: Type[Types])( + columns: Expr[List[String]] + )(using baseType: Type[T], pr: Type[PrepareRow], sess: Type[Session]): List[(Type[_], Expr[_])] = { import quotes.reflect._ (fieldsTup, typesTup) match { case ('[field *: fields], '[tpe *: types]) => @@ -44,13 +49,14 @@ class ColumnsFlicerMacro { if (Expr.summon[GenericDecoder[_, Session, tpe, DecodingType.Specific]].isDefined) then { // TODO Maybe use ==1 versus 'true' in this case. See how this plays out with VendorizeBooleans behavior - val liftClause = '{ $columns.contains(${ Expr(fieldString) }) } - val liftedCondition = LiftMacro.apply[Boolean, PrepareRow, Session](liftClause) + val liftClause = '{ $columns.contains(${ Expr(fieldString) }) } + val liftedCondition = LiftMacro.apply[Boolean, PrepareRow, Session](liftClause) val columnSplice: Expr[tpe] = '{ if ($liftedCondition) $childTTerm else null.asInstanceOf[tpe] } // construction of the comparison term: // if (lift(func(List[String].contains("firstName")))) person.firstName else null val expr = (Type.of[tpe], columnSplice) - val rec = recurse[T, PrepareRow, Session, fields, types](id, Type.of[fields], Type.of[types])(columns)(using baseType) + val rec = + recurse[T, PrepareRow, Session, fields, types](id, Type.of[fields], Type.of[types])(columns)(using baseType) expr +: rec } else { @@ -58,8 +64,9 @@ class ColumnsFlicerMacro { // inner class construct e.g. case class Person(name: Name, age: Int), case class Name(first: String, last: String) // so this property would be p.name in a query query[Person].map(p => Person(Name({p.name}.first, {p.name}.last), ...) 
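/**
 * Overall, the splice this macro produces has the following shape
 * (illustrative, for a hypothetical Person(firstName: String, age: Int)):
 * {{{
 * // ColumnsFlicer(p, columns) ~>
 * // Person(
 * //   if (lift(columns.contains("firstName"))) p.firstName else null.asInstanceOf[String],
 * //   if (lift(columns.contains("age"))) p.age else null.asInstanceOf[Int]
 * // )
 * }}}
 */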
val subMapping = base[tpe, PrepareRow, Session](childTTerm, columns) - val expr = (Type.of[tpe], subMapping) - val rec = recurse[T, PrepareRow, Session, fields, types](id, Type.of[fields], Type.of[types])(columns)(using baseType) + val expr = (Type.of[tpe], subMapping) + val rec = + recurse[T, PrepareRow, Session, fields, types](id, Type.of[fields], Type.of[types])(columns)(using baseType) expr +: rec } @@ -68,14 +75,26 @@ class ColumnsFlicerMacro { } } - def base[T, PrepareRow, Session](using Quotes)(expr: Expr[T], columns: Expr[List[String]])(using tpe: Type[T], pr: Type[PrepareRow], sess: Type[Session]): Expr[T] = { + def base[T, PrepareRow, Session](using Quotes)(expr: Expr[T], columns: Expr[List[String]])(using + tpe: Type[T], + pr: Type[PrepareRow], + sess: Type[Session] + ): Expr[T] = { import quotes.reflect._ Expr.summon[Mirror.Of[T]] match { case Some(ev) => { // Otherwise, recursively summon fields ev match { - case '{ $m: Mirror.ProductOf[T] { type MirroredElemLabels = elementLabels; type MirroredElemTypes = elementTypes } } => - val fields = recurse[T, PrepareRow, Session, elementLabels, elementTypes](expr.asTerm, Type.of[elementLabels], Type.of[elementTypes])(columns)(using tpe) + case '{ + $m: Mirror.ProductOf[T] { + type MirroredElemLabels = elementLabels; type MirroredElemTypes = elementTypes + } + } => + val fields = recurse[T, PrepareRow, Session, elementLabels, elementTypes]( + expr.asTerm, + Type.of[elementLabels], + Type.of[elementTypes] + )(columns)(using tpe) ConstructType[T](m, fields) case _ => report.throwError(s"Mirror for ${Type.of[T]} is not a product") diff --git a/quill-sql/src/main/scala/io/getquill/metaprog/etc/MapFlicer.scala b/quill-sql/src/main/scala/io/getquill/metaprog/etc/MapFlicer.scala index bae120cd1..2b8d0b56a 100644 --- a/quill-sql/src/main/scala/io/getquill/metaprog/etc/MapFlicer.scala +++ b/quill-sql/src/main/scala/io/getquill/metaprog/etc/MapFlicer.scala @@ -20,8 +20,10 @@ import io.getquill.util.Format object MapFlicer { inline def apply[T, PrepareRow, Session](inline entity: T, inline map: Map[String, Any]): Boolean = ${ applyImpl[T, PrepareRow, Session]('entity, 'map) } - private def applyImpl[T: Type, PrepareRow: Type, Session: Type](entity: Expr[T], map: Expr[Map[String, Any]])(using Quotes): Expr[Boolean] = { - val mp = new MapFlicerMacro + private def applyImpl[T: Type, PrepareRow: Type, Session: Type](entity: Expr[T], map: Expr[Map[String, Any]])(using + Quotes + ): Expr[Boolean] = { + val mp = new MapFlicerMacro val ret = mp.base[T, PrepareRow, Session](entity, map) ret } @@ -41,7 +43,9 @@ class MapFlicerMacro { TypeRepr.of(using tpe) <:< TypeRepr.of[Product] } - private def buildClause[T: Type, PrepareRow: Type, Session: Type](core: Expr[T])(map: Expr[Map[String, Any]])(using Quotes): Expr[Boolean] = { + private def buildClause[T: Type, PrepareRow: Type, Session: Type]( + core: Expr[T] + )(map: Expr[Map[String, Any]])(using Quotes): Expr[Boolean] = { import quotes.reflect._ ElaborateStructure.decomposedProductValueDetails[T](ElaborationSide.Encoding, UdtBehavior.Leaf) match { case (terms, Leaf) => report.throwError("Not supported yet", core) @@ -68,7 +72,9 @@ class MapFlicerMacro { // This should be totally fine but if postgres can't handle `? IS NULL` if `?` is a timestamp, even if the type is explicitly state in the encoder // (Reason for this insane behavior is described here: https://github.com/pgjdbc/pgjdbc/issues/276). 
// So instead we just splice a check if the value is null into the `lift` call and the problem is entirely avoided. - $field == ${ LiftMacro.valueOrString[T, PrepareRow, Session](mapSplice) } || ${ LiftMacro[Boolean, PrepareRow, Session]('{ $mapSplice == null }) } + $field == ${ LiftMacro.valueOrString[T, PrepareRow, Session](mapSplice) } || ${ + LiftMacro[Boolean, PrepareRow, Session]('{ $mapSplice == null }) + } } } @@ -78,9 +84,7 @@ class MapFlicerMacro { // Assuming: actualChildType <:< TypeRepr.of[inner] if (isOptional) '{ - ${ childTTerm.asExprOf[Option[inner]] }.exists(field => - ${ fieldInject[inner]('{ field }) } - ) + ${ childTTerm.asExprOf[Option[inner]] }.exists(field => ${ fieldInject[inner]('{ field }) }) } else fieldInject[inner](childTTerm.asExprOf[inner]) @@ -91,8 +95,12 @@ class MapFlicerMacro { } def base[T, PrepareRow, Session](using - Quotes - )(expr: Expr[T], map: Expr[Map[String, Any]])(using tpe: Type[T], pr: Type[PrepareRow], sess: Type[Session]): Expr[Boolean] = { + Quotes + )(expr: Expr[T], map: Expr[Map[String, Any]])(using + tpe: Type[T], + pr: Type[PrepareRow], + sess: Type[Session] + ): Expr[Boolean] = { import quotes.reflect._ buildClause[T, PrepareRow, Session](expr)(map) } diff --git a/quill-sql/src/main/scala/io/getquill/parser/BooAstSerializer.scala b/quill-sql/src/main/scala/io/getquill/parser/BooAstSerializer.scala index 9f65eb258..f66b3385d 100644 --- a/quill-sql/src/main/scala/io/getquill/parser/BooAstSerializer.scala +++ b/quill-sql/src/main/scala/io/getquill/parser/BooAstSerializer.scala @@ -26,8 +26,8 @@ object AstPicklers { } override def unpickle(implicit state: UnpickleState): Ident = { // Need to do this here because can't put things into the class by value - val a = state.unpickle[String] - val b = state.unpickle[Quat] + val a = state.unpickle[String] + val b = state.unpickle[Quat] val vis = state.unpickle[Visibility](visibilityPickler) Ident.Opinionated(a, b, vis) } @@ -38,7 +38,9 @@ object AstPicklers { override def pickle(value: Entity)(implicit state: PickleState): Unit = { state.pickle(value.name) state.pickle(value.properties) - state.pickle(value.quat) // need to access Quat.Product, the bestQuat member is just Quat because in some cases it can be Unknown + state.pickle( + value.quat + ) // need to access Quat.Product, the bestQuat member is just Quat because in some cases it can be Unknown state.pickle(value.renameable) () } @@ -134,7 +136,7 @@ object AstPicklers { } // ==== Function Picker ==== - implicit val functionPickler: Pickler[Function] = generatePickler[Function] + implicit val functionPickler: Pickler[Function] = generatePickler[Function] implicit val functionApplyPickler: Pickler[FunctionApply] = generatePickler[FunctionApply] // ==== ExternalIdent Picker ==== @@ -300,8 +302,8 @@ object AstPicklers { .addConcreteType[ListContains] // ==== Operation Pickers ==== - implicit val ifPickler: Pickler[If] = generatePickler[If] - implicit val assignmentPickler: Pickler[Assignment] = generatePickler[Assignment] + implicit val ifPickler: Pickler[If] = generatePickler[If] + implicit val assignmentPickler: Pickler[Assignment] = generatePickler[Assignment] implicit val assignmentDualPickler: Pickler[AssignmentDual] = generatePickler[AssignmentDual] given CompositePickler[PrefixUnaryOperator] = @@ -363,15 +365,15 @@ object AstPicklers { // ==== Value Pickers ==== sealed trait ConstantTypes { def v: Any } object ConstantTypes { - case class Int(v: scala.Int) extends ConstantTypes - case class Long(v: scala.Long) extends ConstantTypes - 
case class Short(v: scala.Short) extends ConstantTypes - case class Float(v: scala.Float) extends ConstantTypes - case class Double(v: scala.Double) extends ConstantTypes - case class Byte(v: scala.Byte) extends ConstantTypes - case class Boolean(v: scala.Boolean) extends ConstantTypes + case class Int(v: scala.Int) extends ConstantTypes + case class Long(v: scala.Long) extends ConstantTypes + case class Short(v: scala.Short) extends ConstantTypes + case class Float(v: scala.Float) extends ConstantTypes + case class Double(v: scala.Double) extends ConstantTypes + case class Byte(v: scala.Byte) extends ConstantTypes + case class Boolean(v: scala.Boolean) extends ConstantTypes case class String(v: java.lang.String) extends ConstantTypes - case object Unit extends ConstantTypes { def v: Unit = () } + case object Unit extends ConstantTypes { def v: Unit = () } def from(constant: Constant): ConstantTypes = constant.v match { case v: scala.Int => ConstantTypes.Int(v) @@ -383,7 +385,8 @@ object AstPicklers { case v: scala.Boolean => ConstantTypes.Boolean(v) case v: java.lang.String => ConstantTypes.String(v) case v: Unit => ConstantTypes.Unit - case other => throw new IllegalArgumentException(s"Serialization Failure: The type `${other}` is not a valid ast.Constant.") + case other => + throw new IllegalArgumentException(s"Serialization Failure: The type `${other}` is not a valid ast.Constant.") } } implicit object constantPickler extends Pickler[Constant] { @@ -419,7 +422,7 @@ object AstPicklers { () } override def unpickle(implicit state: UnpickleState): CaseClass = { - val name = state.unpickle[String] + val name = state.unpickle[String] val children = state.unpickle[LinkedHashMap[String, Ast]].toList new CaseClass(name, children) } @@ -546,7 +549,7 @@ object BooSerializer { object Ast { def serialize(ast: QAst): String = { - val bytes = Pickle.intoBytes(ast) + val bytes = Pickle.intoBytes(ast) val arr: Array[Byte] = new Array[Byte](bytes.remaining()) bytes.get(arr) Base64.getEncoder.encodeToString(arr) @@ -559,7 +562,7 @@ object BooSerializer { object Quat { def serialize(quat: QQuat): String = { - val bytes = Pickle.intoBytes(quat) + val bytes = Pickle.intoBytes(quat) val arr: Array[Byte] = new Array[Byte](bytes.remaining()) bytes.get(arr) Base64.getEncoder.encodeToString(arr) @@ -572,7 +575,7 @@ object BooSerializer { object QuatProduct { def serialize(product: QQuat.Product): String = { - val bytes = Pickle.intoBytes(product) + val bytes = Pickle.intoBytes(product) val arr: Array[Byte] = new Array[Byte](bytes.remaining()) bytes.get(arr) Base64.getEncoder.encodeToString(arr) diff --git a/quill-sql/src/main/scala/io/getquill/parser/Lifter.scala b/quill-sql/src/main/scala/io/getquill/parser/Lifter.scala index 4a0a7ad3f..f099bf392 100644 --- a/quill-sql/src/main/scala/io/getquill/parser/Lifter.scala +++ b/quill-sql/src/main/scala/io/getquill/parser/Lifter.scala @@ -18,8 +18,9 @@ import io.getquill.util.CommonExtensions.Throwable._ /** * Convert constructs of Quill Ast into Expr[Ast]. This allows them to be passed - * back an fourth between inline Quotation blocks during compile-time which should eventually - * be bassed into a run-call-site where they will be evaluated into SQL. + * back and forth between inline Quotation blocks during compile-time which + * should eventually be passed into a run-call-site where they will be evaluated + * into SQL. 
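As a rough sketch of the lifting pattern this file implements (illustrative only; `MiniAst` is a tiny stand-in for Quill's Ast, not part of this patch): a ToExpr instance rebuilds a runtime value as an expression tree so the compiler can splice it between quotation blocks.

import scala.quoted.*

sealed trait MiniAst
case class MiniConstant(value: String) extends MiniAst
case class MiniTuple(values: List[MiniAst]) extends MiniAst

given ToExpr[MiniAst] with
  def apply(ast: MiniAst)(using Quotes): Expr[MiniAst] = ast match
    case MiniConstant(v) => '{ MiniConstant(${ Expr(v) }) }
    // Lifting the List of children works out of the box, as the scaladoc
    // notes: the standard library supplies ToExpr for List given ToExpr[MiniAst].
    case MiniTuple(vs) => '{ MiniTuple(${ Expr(vs) }) }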
* * Note that liftable List is already taken care of by the Dotty implicits */ @@ -30,7 +31,10 @@ case class Lifter(serializeQuat: SerializeQuat, serializeAst: SerializeAst) exte def expr: Expr[T] = Expr(t) } - trait LiftAstSerialize[T <: Ast: ClassTag] extends ToExpr[T] with Lifters.WithSerializing.Ast[T] with Lifters.Plain.Ast[T] { + trait LiftAstSerialize[T <: Ast: ClassTag] + extends ToExpr[T] + with Lifters.WithSerializing.Ast[T] + with Lifters.Plain.Ast[T] { def typeTag: Quotes ?=> TType[T] def lift: Quotes ?=> PartialFunction[T, Expr[T]] @@ -42,13 +46,16 @@ case class Lifter(serializeQuat: SerializeQuat, serializeAst: SerializeAst) exte else if (hasSerializeDisabledTypeclass) Lifter(SerializeQuat.None, SerializeAst.None).liftAst(element).asInstanceOf[Expr[T]] else if (serializeAst == SerializeAst.All) - tryLiftSerialized(element).getOrElse { liftPlainOrFail(element) } + tryLiftSerialized(element).getOrElse(liftPlainOrFail(element)) else liftPlainOrFail(element) } } // end LiftAstSerialize - trait LiftQuatSerialize[T <: Quat: ClassTag] extends ToExpr[T] with Lifters.WithSerializing.Quat[T] with Lifters.Plain.Quat[T] { + trait LiftQuatSerialize[T <: Quat: ClassTag] + extends ToExpr[T] + with Lifters.WithSerializing.Quat[T] + with Lifters.Plain.Quat[T] { def typeTag: Quotes ?=> TType[T] def lift: Quotes ?=> PartialFunction[T, Expr[T]] @@ -60,7 +67,7 @@ case class Lifter(serializeQuat: SerializeQuat, serializeAst: SerializeAst) exte else if (hasSerializeDisabledTypeclass) Lifter(SerializeQuat.None, SerializeAst.None).liftQuat(element).asInstanceOf[Expr[T]] else if (serializeQuat == SerializeQuat.All) - tryLiftSerialized(element).getOrElse { liftPlainOrFail(element) } + tryLiftSerialized(element).getOrElse(liftPlainOrFail(element)) else liftPlainOrFail(element) } @@ -111,29 +118,28 @@ case class Lifter(serializeQuat: SerializeQuat, serializeAst: SerializeAst) exte given liftIdent: LiftAstSerialize[AIdent] with { def typeTag = TType.of[AIdent] - def lift = { - case AIdent.Opinionated(name: String, quat, visibility) => - '{ AIdent.Opinionated(${ name.expr }, ${ quat.expr }, ${ visibility.expr }) } + def lift = { case AIdent.Opinionated(name: String, quat, visibility) => + '{ AIdent.Opinionated(${ name.expr }, ${ quat.expr }, ${ visibility.expr }) } } } given liftPropertyAlias: Lifters.Plain[PropertyAlias] with { - def lift = { - case PropertyAlias(a, b) => '{ PropertyAlias(${ a.expr }, ${ b.expr }) } + def lift = { case PropertyAlias(a, b) => + '{ PropertyAlias(${ a.expr }, ${ b.expr }) } } } given liftAssignment: LiftAstSerialize[Assignment] with { def typeTag = TType.of[Assignment] - def lift = { - case Assignment(ident, property, value) => '{ Assignment(${ ident.expr }, ${ property.expr }, ${ value.expr }) } + def lift = { case Assignment(ident, property, value) => + '{ Assignment(${ ident.expr }, ${ property.expr }, ${ value.expr }) } } } given liftAssignmentDual: LiftAstSerialize[AssignmentDual] with { def typeTag = TType.of[AssignmentDual] - def lift = { - case AssignmentDual(ident1, ident2, property, value) => '{ AssignmentDual(${ ident1.expr }, ${ ident2.expr }, ${ property.expr }, ${ value.expr }) } + def lift = { case AssignmentDual(ident1, ident2, property, value) => + '{ AssignmentDual(${ ident1.expr }, ${ ident2.expr }, ${ property.expr }, ${ value.expr }) } } } @@ -148,13 +154,14 @@ case class Lifter(serializeQuat: SerializeQuat, serializeAst: SerializeAst) exte given liftQuatProduct: LiftQuatSerialize[Quat.Product] with { def typeTag = TType.of[Quat.Product] - def lift = { - 
case Quat.Product.WithRenamesCompact(name, tpe, fields, values, renamesFrom, renamesTo) => - '{ - io.getquill.quat.Quat.Product.WithRenamesCompact.apply(${ name.expr }, ${ tpe.expr })(${ fields.toList.spliceVarargs }: _*)(${ values.toList.spliceVarargs }: _*)(${ renamesFrom.toList.spliceVarargs }: _*)(${ - renamesTo.toList.spliceVarargs - }: _*) - } + def lift = { case Quat.Product.WithRenamesCompact(name, tpe, fields, values, renamesFrom, renamesTo) => + '{ + io.getquill.quat.Quat.Product.WithRenamesCompact.apply(${ name.expr }, ${ tpe.expr })(${ + fields.toList.spliceVarargs + }: _*)(${ values.toList.spliceVarargs }: _*)(${ renamesFrom.toList.spliceVarargs }: _*)(${ + renamesTo.toList.spliceVarargs + }: _*) + } } } @@ -165,8 +172,11 @@ case class Lifter(serializeQuat: SerializeQuat, serializeAst: SerializeAst) exte given liftQuat: LiftQuatSerialize[Quat] with { def typeTag = TType.of[Quat] def lift = { - case Quat.Product.WithRenamesCompact(name, tpe, fields, values, renamesFrom, renamesTo) => '{ - io.getquill.quat.Quat.Product.WithRenamesCompact.apply(${ name.expr }, ${ tpe.expr })(${ fields.toList.spliceVarargs }: _*)(${ values.toList.spliceVarargs }: _*)(${ renamesFrom.toList.spliceVarargs }: _*)(${ + case Quat.Product.WithRenamesCompact(name, tpe, fields, values, renamesFrom, renamesTo) => + '{ + io.getquill.quat.Quat.Product.WithRenamesCompact.apply(${ name.expr }, ${ tpe.expr })(${ + fields.toList.spliceVarargs + }: _*)(${ values.toList.spliceVarargs }: _*)(${ renamesFrom.toList.spliceVarargs }: _*)(${ renamesTo.toList.spliceVarargs }: _*) } @@ -225,22 +235,24 @@ case class Lifter(serializeQuat: SerializeQuat, serializeAst: SerializeAst) exte def typeTag = TType.of[Entity] def lift = { // case ast if (serializeAst == SerializeAst.All) => tryToSerialize[Entity](ast) - case Entity.Opinionated(name: String, list, quat, renameable) => '{ Entity.Opinionated(${ name.expr }, ${ list.expr }, ${ quat.expr }, ${ renameable.expr }) } + case Entity.Opinionated(name: String, list, quat, renameable) => + '{ Entity.Opinionated(${ name.expr }, ${ list.expr }, ${ quat.expr }, ${ renameable.expr }) } } } given liftCaseClass: LiftAstSerialize[CaseClass] with { def typeTag = TType.of[CaseClass] - def lift = { - case cc @ CaseClass(name, lifts) => - '{ CaseClass(${ name.expr }, ${ lifts.expr }) } // List lifter and tuple lifter come built in so can just do Expr(lifts) (or lifts.expr for short) + def lift = { case cc @ CaseClass(name, lifts) => + '{ + CaseClass(${ name.expr }, ${ lifts.expr }) + } // List lifter and tuple lifter come built in so can just do Expr(lifts) (or lifts.expr for short) } } given liftTuple: LiftAstSerialize[Tuple] with { def typeTag = TType.of[Tuple] - def lift = { - case Tuple(values) => '{ Tuple(${ values.expr }) } + def lift = { case Tuple(values) => + '{ Tuple(${ values.expr }) } } } @@ -260,13 +272,16 @@ case class Lifter(serializeQuat: SerializeQuat, serializeAst: SerializeAst) exte given liftAction: LiftAstSerialize[Action] with { def typeTag = TType.of[Action] def lift = { - case Insert(query: Ast, assignments: List[Assignment]) => '{ Insert(${ query.expr }, ${ assignments.expr }) } - case Update(query: Ast, assignments: List[Assignment]) => '{ Update(${ query.expr }, ${ assignments.expr }) } - case Delete(query: Ast) => '{ Delete(${ query.expr }) } - case Returning(action: Ast, alias: AIdent, body: Ast) => '{ Returning(${ action.expr }, ${ alias.expr }, ${ body.expr }) } - case ReturningGenerated(action: Ast, alias: AIdent, body: Ast) => '{ ReturningGenerated(${ 
action.expr }, ${ alias.expr }, ${ body.expr }) } - case Foreach(query: Ast, alias: AIdent, body: Ast) => '{ Foreach(${ query.expr }, ${ alias.expr }, ${ body.expr }) } - case OnConflict(a, b, c) => '{ OnConflict(${ a.expr }, ${ b.expr }, ${ c.expr }) } + case Insert(query: Ast, assignments: List[Assignment]) => '{ Insert(${ query.expr }, ${ assignments.expr }) } + case Update(query: Ast, assignments: List[Assignment]) => '{ Update(${ query.expr }, ${ assignments.expr }) } + case Delete(query: Ast) => '{ Delete(${ query.expr }) } + case Returning(action: Ast, alias: AIdent, body: Ast) => + '{ Returning(${ action.expr }, ${ alias.expr }, ${ body.expr }) } + case ReturningGenerated(action: Ast, alias: AIdent, body: Ast) => + '{ ReturningGenerated(${ action.expr }, ${ alias.expr }, ${ body.expr }) } + case Foreach(query: Ast, alias: AIdent, body: Ast) => + '{ Foreach(${ query.expr }, ${ alias.expr }, ${ body.expr }) } + case OnConflict(a, b, c) => '{ OnConflict(${ a.expr }, ${ b.expr }, ${ c.expr }) } } } @@ -287,57 +302,65 @@ case class Lifter(serializeQuat: SerializeQuat, serializeAst: SerializeAst) exte given liftQuery: LiftAstSerialize[AQuery] with { def typeTag = TType.of[AQuery] def lift = { - case e: Entity => liftEntity(e) - case Filter(query: Ast, alias: AIdent, body: Ast) => '{ Filter(${ query.expr }, ${ alias.expr }, ${ body.expr }) } - case Map(query: Ast, alias: AIdent, body: Ast) => '{ Map(${ query.expr }, ${ alias.expr }, ${ body.expr }) } - case FlatMap(query: Ast, alias: AIdent, body: Ast) => '{ FlatMap(${ query.expr }, ${ alias.expr }, ${ body.expr }) } - case ConcatMap(query: Ast, alias: AIdent, body: Ast) => '{ ConcatMap(${ query.expr }, ${ alias.expr }, ${ body.expr }) } - case SortBy(query: Ast, alias: AIdent, criterias: Ast, ordering: Ast) => '{ SortBy(${ query.expr }, ${ alias.expr }, ${ criterias.expr }, ${ ordering.expr }) } - case GroupBy(query: Ast, alias: AIdent, body: Ast) => '{ GroupBy(${ query.expr }, ${ alias.expr }, ${ body.expr }) } + case e: Entity => liftEntity(e) + case Filter(query: Ast, alias: AIdent, body: Ast) => '{ Filter(${ query.expr }, ${ alias.expr }, ${ body.expr }) } + case Map(query: Ast, alias: AIdent, body: Ast) => '{ Map(${ query.expr }, ${ alias.expr }, ${ body.expr }) } + case FlatMap(query: Ast, alias: AIdent, body: Ast) => + '{ FlatMap(${ query.expr }, ${ alias.expr }, ${ body.expr }) } + case ConcatMap(query: Ast, alias: AIdent, body: Ast) => + '{ ConcatMap(${ query.expr }, ${ alias.expr }, ${ body.expr }) } + case SortBy(query: Ast, alias: AIdent, criterias: Ast, ordering: Ast) => + '{ SortBy(${ query.expr }, ${ alias.expr }, ${ criterias.expr }, ${ ordering.expr }) } + case GroupBy(query: Ast, alias: AIdent, body: Ast) => + '{ GroupBy(${ query.expr }, ${ alias.expr }, ${ body.expr }) } case GroupByMap(query: Ast, byAlias: AIdent, byBody: Ast, mapAlias: AIdent, mapBody: Ast) => '{ GroupByMap(${ query.expr }, ${ byAlias.expr }, ${ byBody.expr }, ${ mapAlias.expr }, ${ mapBody.expr }) } - case Aggregation(operator, query) => '{ Aggregation(${ operator.expr }, ${ query.expr }) } - case Take(query: Ast, num: Ast) => '{ Take(${ query.expr }, ${ num.expr }) } - case Drop(query: Ast, num: Ast) => '{ Drop(${ query.expr }, ${ num.expr }) } - case Union(a, b) => '{ Union(${ a.expr }, ${ b.expr }) } - case UnionAll(a, b) => '{ UnionAll(${ a.expr }, ${ b.expr }) } - case Join(typ, a, b, identA, identB, body) => '{ Join(${ typ.expr }, ${ a.expr }, ${ b.expr }, ${ identA.expr }, ${ identB.expr }, ${ body.expr }) } - case FlatJoin(typ, a, identA, 
on) => '{ FlatJoin(${ typ.expr }, ${ a.expr }, ${ identA.expr }, ${ on.expr }) } - case DistinctOn(query, alias, body) => '{ DistinctOn(${ query.expr }, ${ alias.expr }, ${ body.expr }) } - case Distinct(a: Ast) => '{ Distinct(${ a.expr }) } - case Nested(a: Ast) => '{ Nested(${ a.expr }) } + case Aggregation(operator, query) => '{ Aggregation(${ operator.expr }, ${ query.expr }) } + case Take(query: Ast, num: Ast) => '{ Take(${ query.expr }, ${ num.expr }) } + case Drop(query: Ast, num: Ast) => '{ Drop(${ query.expr }, ${ num.expr }) } + case Union(a, b) => '{ Union(${ a.expr }, ${ b.expr }) } + case UnionAll(a, b) => '{ UnionAll(${ a.expr }, ${ b.expr }) } + case Join(typ, a, b, identA, identB, body) => + '{ Join(${ typ.expr }, ${ a.expr }, ${ b.expr }, ${ identA.expr }, ${ identB.expr }, ${ body.expr }) } + case FlatJoin(typ, a, identA, on) => '{ FlatJoin(${ typ.expr }, ${ a.expr }, ${ identA.expr }, ${ on.expr }) } + case DistinctOn(query, alias, body) => '{ DistinctOn(${ query.expr }, ${ alias.expr }, ${ body.expr }) } + case Distinct(a: Ast) => '{ Distinct(${ a.expr }) } + case Nested(a: Ast) => '{ Nested(${ a.expr }) } } } given liftAst: LiftAstSerialize[Ast] with { def typeTag = TType.of[Ast] def lift = { - case q: AQuery => liftQuery(q) - case v: Property => liftProperty(v) - case v: AIdent => liftIdent(v) - case v: IterableOperation => liftTraversableOperation(v) - case v: OptionOperation => liftOptionOperation(v) - case a: Assignment => liftAssignment(a) - case a: AssignmentDual => liftAssignmentDual(a) - case a: Action => liftAction(a) - case v: Entity => liftEntity(v) - case v: Tuple => liftTuple(v) - case v: CaseClass => liftCaseClass(v) - case v: Ordering => orderingLiftable(v) - case Constant(ConstantValue(v), quat) => '{ Constant(${ ConstantExpr(v) }, ${ quat.expr }) } - case Constant((), quat) => '{ Constant((), ${ quat.expr }) } - case Function(params: List[AIdent], body: Ast) => '{ Function(${ params.expr }, ${ body.expr }) } - case FunctionApply(function: Ast, values: List[Ast]) => '{ FunctionApply(${ function.expr }, ${ values.expr }) } - case If(cond, thenStmt, elseStmt) => '{ If(${ cond.expr }, ${ thenStmt.expr }, ${ elseStmt.expr }) } - case UnaryOperation(operator: UnaryOperator, a: Ast) => '{ UnaryOperation(${ liftOperator(operator).asInstanceOf[Expr[UnaryOperator]] }, ${ a.expr }) } - case BinaryOperation(a: Ast, operator: BinaryOperator, b: Ast) => '{ BinaryOperation(${ a.expr }, ${ liftOperator(operator).asInstanceOf[Expr[BinaryOperator]] }, ${ b.expr }) } - case v: ScalarTag => liftScalarTag(v) - case v: QuotationTag => liftQuotationTag(v) - case Infix(parts, params, pure, transparent, quat) => '{ Infix(${ parts.expr }, ${ params.expr }, ${ pure.expr }, ${ transparent.expr }, ${ quat.expr }) } - case OnConflict.Excluded(a) => '{ OnConflict.Excluded(${ a.expr }) } - case OnConflict.Existing(a) => '{ OnConflict.Existing(${ a.expr }) } - case NullValue => '{ NullValue } + case q: AQuery => liftQuery(q) + case v: Property => liftProperty(v) + case v: AIdent => liftIdent(v) + case v: IterableOperation => liftTraversableOperation(v) + case v: OptionOperation => liftOptionOperation(v) + case a: Assignment => liftAssignment(a) + case a: AssignmentDual => liftAssignmentDual(a) + case a: Action => liftAction(a) + case v: Entity => liftEntity(v) + case v: Tuple => liftTuple(v) + case v: CaseClass => liftCaseClass(v) + case v: Ordering => orderingLiftable(v) + case Constant(ConstantValue(v), quat) => '{ Constant(${ ConstantExpr(v) }, ${ quat.expr }) } + case Constant((), 
quat) => '{ Constant((), ${ quat.expr }) } + case Function(params: List[AIdent], body: Ast) => '{ Function(${ params.expr }, ${ body.expr }) } + case FunctionApply(function: Ast, values: List[Ast]) => '{ FunctionApply(${ function.expr }, ${ values.expr }) } + case If(cond, thenStmt, elseStmt) => '{ If(${ cond.expr }, ${ thenStmt.expr }, ${ elseStmt.expr }) } + case UnaryOperation(operator: UnaryOperator, a: Ast) => + '{ UnaryOperation(${ liftOperator(operator).asInstanceOf[Expr[UnaryOperator]] }, ${ a.expr }) } + case BinaryOperation(a: Ast, operator: BinaryOperator, b: Ast) => + '{ BinaryOperation(${ a.expr }, ${ liftOperator(operator).asInstanceOf[Expr[BinaryOperator]] }, ${ b.expr }) } + case v: ScalarTag => liftScalarTag(v) + case v: QuotationTag => liftQuotationTag(v) + case Infix(parts, params, pure, transparent, quat) => + '{ Infix(${ parts.expr }, ${ params.expr }, ${ pure.expr }, ${ transparent.expr }, ${ quat.expr }) } + case OnConflict.Excluded(a) => '{ OnConflict.Excluded(${ a.expr }) } + case OnConflict.Existing(a) => '{ OnConflict.Existing(${ a.expr }) } + case NullValue => '{ NullValue } } } @@ -350,15 +373,15 @@ case class Lifter(serializeQuat: SerializeQuat, serializeAst: SerializeAst) exte given liftScalarTag: LiftAstSerialize[ScalarTag] with { def typeTag = TType.of[ScalarTag] - def lift = { - case ScalarTag(uid: String, source) => '{ ScalarTag(${ uid.expr }, ${ source.expr }) } + def lift = { case ScalarTag(uid: String, source) => + '{ ScalarTag(${ uid.expr }, ${ source.expr }) } } } given liftQuotationTag: LiftAstSerialize[QuotationTag] with { def typeTag = TType.of[QuotationTag] - def lift = { - case QuotationTag(uid: String) => '{ QuotationTag(${ uid.expr }) } + def lift = { case QuotationTag(uid: String) => + '{ QuotationTag(${ uid.expr }) } } } @@ -383,11 +406,15 @@ case class Lifter(serializeQuat: SerializeQuat, serializeAst: SerializeAst) exte case StringOperator.toInt => '{ StringOperator.toInt } case StringOperator.startsWith => '{ StringOperator.startsWith } case StringOperator.split => '{ StringOperator.split } - case EqualityOperator.`_==` => '{ EqualityOperator.`_==` } // if you don't do it this way, complains about 'stable identifier error' - case EqualityOperator.`_!=` => '{ EqualityOperator.`_!=` } // (can't use 'ne' here because 'ne' alias is a non-stable identifier? maybe used for something else?) - case BooleanOperator.|| => '{ BooleanOperator.|| } - case BooleanOperator.&& => '{ BooleanOperator.&& } - case BooleanOperator.! => '{ BooleanOperator.! } + case EqualityOperator.`_==` => + '{ EqualityOperator.`_==` } // if you don't do it this way, complains about 'stable identifier error' + case EqualityOperator.`_!=` => + '{ + EqualityOperator.`_!=` + } // (can't use 'ne' here because 'ne' alias is a non-stable identifier? maybe used for something else?) + case BooleanOperator.|| => '{ BooleanOperator.|| } + case BooleanOperator.&& => '{ BooleanOperator.&& } + case BooleanOperator.! => '{ BooleanOperator.! 
} } } } @@ -395,7 +422,7 @@ case class Lifter(serializeQuat: SerializeQuat, serializeAst: SerializeAst) exte object Lifter extends Lifters.Proxy { val default = new Lifter(SerializeQuat.global, SerializeAst.global) - def NotSerializing = Lifter(SerializeQuat.None, SerializeAst.None) + def NotSerializing = Lifter(SerializeQuat.None, SerializeAst.None) def NotSerializingAst = Lifter(SerializeQuat.global, SerializeAst.None) def WithBehavior(serializeQuat: Option[SerializeQuat] = None, serializeAst: Option[SerializeAst] = None) = Lifter(serializeQuat.getOrElse(SerializeQuat.global), serializeAst.getOrElse(SerializeAst.global)) diff --git a/quill-sql/src/main/scala/io/getquill/parser/Lifters.scala b/quill-sql/src/main/scala/io/getquill/parser/Lifters.scala index 4f0267a68..ff1a5b726 100644 --- a/quill-sql/src/main/scala/io/getquill/parser/Lifters.scala +++ b/quill-sql/src/main/scala/io/getquill/parser/Lifters.scala @@ -23,11 +23,12 @@ object Lifters { trait Proxy { def default: Lifter - def apply(ast: Ast): Quotes ?=> Expr[Ast] = default.liftAst(ast) // can also do ast.lift but this makes some error messages simpler - def action(ast: Action): Quotes ?=> Expr[Action] = default.liftAction(ast) + def apply(ast: Ast): Quotes ?=> Expr[Ast] = + default.liftAst(ast) // can also do ast.lift but this makes some error messages simpler + def action(ast: Action): Quotes ?=> Expr[Action] = default.liftAction(ast) def assignment(ast: Assignment): Quotes ?=> Expr[Assignment] = default.liftAssignment(ast) - def entity(ast: Entity): Quotes ?=> Expr[Entity] = default.liftEntity(ast) - def tuple(ast: Tuple): Quotes ?=> Expr[Tuple] = default.liftTuple(ast) + def entity(ast: Entity): Quotes ?=> Expr[Entity] = default.liftEntity(ast) + def tuple(ast: Tuple): Quotes ?=> Expr[Tuple] = default.liftTuple(ast) def caseClass(ast: CaseClass): Quotes ?=> Expr[CaseClass] = // Need to use lift directly since using liftCaseClass.apply serializes to Ast which can result // in: Expecting io.getquill.ast.CaseClass but got io.getquill.ast.Ast errors. It is hard to understand @@ -35,11 +36,11 @@ object Lifters { // the caseClassAst variable before extracting it from the quotation. default.liftCaseClass.lift(ast) - def ident(ast: AIdent): Quotes ?=> Expr[AIdent] = default.liftIdent(ast) - def quat(quat: Quat): Quotes ?=> Expr[Quat] = default.liftQuat(quat) + def ident(ast: AIdent): Quotes ?=> Expr[AIdent] = default.liftIdent(ast) + def quat(quat: Quat): Quotes ?=> Expr[Quat] = default.liftQuat(quat) def returnAction(returnAction: ReturnAction): Quotes ?=> Expr[ReturnAction] = default.liftReturnAction(returnAction) - def scalarTag(v: ScalarTag): Quotes ?=> Expr[ScalarTag] = default.liftScalarTag(v) + def scalarTag(v: ScalarTag): Quotes ?=> Expr[ScalarTag] = default.liftScalarTag(v) def quotationTag(v: QuotationTag): Quotes ?=> Expr[QuotationTag] = default.liftQuotationTag(v) } // end Proxy @@ -56,10 +57,10 @@ object Lifters { protected def orWarn(element: T, e: Throwable)(using Quotes): Unit = { val msg = s"""Could not unift-serialize the '${element.getClass}': - |${io.getquill.util.Messages.qprint(element)}. - |Performing a regular unlift instead. Due to exception: - |${e.stackTraceToString} - |""".stripMargin + |${io.getquill.util.Messages.qprint(element)}. + |Performing a regular unlift instead. 
Due to exception: + |${e.stackTraceToString} + |""".stripMargin println(s"WARNING: ${msg}") quotes.reflect.report.warning(msg) } @@ -112,7 +113,7 @@ object Lifters { |${section(qprint(element).toString)} |"""".stripMargin - def unapply(element: T)(using Quotes) = Some(orFail(element)) + def unapply(element: T)(using Quotes) = Some(orFail(element)) def apply(element: T)(using Quotes): Expr[T] = orFail(element) } // end Plain diff --git a/quill-sql/src/main/scala/io/getquill/parser/Parser.scala b/quill-sql/src/main/scala/io/getquill/parser/Parser.scala index f120fe48f..91e018db1 100644 --- a/quill-sql/src/main/scala/io/getquill/parser/Parser.scala +++ b/quill-sql/src/main/scala/io/getquill/parser/Parser.scala @@ -1,6 +1,14 @@ package io.getquill.parser -import io.getquill.ast.{Ident => AIdent, Query => AQuery, Action => AAction, Insert => AInsert, Update => AUpdate, Delete => ADelete, _} +import io.getquill.ast.{ + Ident => AIdent, + Query => AQuery, + Action => AAction, + Insert => AInsert, + Update => AUpdate, + Delete => ADelete, + _ +} import io.getquill.ast import io.getquill.metaprog.PlanterExpr import io.getquill.metaprog.QuotedExpr @@ -43,27 +51,29 @@ trait ParserLibrary extends ParserFactory { // TODO add a before everything identity parser, // a after everything except Inline recurse parser - protected def quotationParser(using Quotes, TranspileConfig) = ParserChain.attempt(QuotationParser(_)) - protected def queryParser(using Quotes, TranspileConfig) = ParserChain.attempt(QueryParser(_)) - protected def infixParser(using Quotes, TranspileConfig) = ParserChain.attempt(InfixParser(_)) + protected def quotationParser(using Quotes, TranspileConfig) = ParserChain.attempt(QuotationParser(_)) + protected def queryParser(using Quotes, TranspileConfig) = ParserChain.attempt(QueryParser(_)) + protected def infixParser(using Quotes, TranspileConfig) = ParserChain.attempt(InfixParser(_)) protected def setOperationsParser(using Quotes, TranspileConfig) = ParserChain.attempt(SetOperationsParser(_)) - protected def queryScalarsParser(using Quotes, TranspileConfig) = ParserChain.attempt(QueryScalarsParser(_)) - protected def traversableOperationParser(using Quotes, TranspileConfig) = ParserChain.attempt(TraversableOperationParser(_)) - protected def patMatchParser(using Quotes, TranspileConfig) = ParserChain.attempt(CasePatMatchParser(_)) - protected def functionParser(using Quotes, TranspileConfig) = ParserChain.attempt(FunctionParser(_)) + protected def queryScalarsParser(using Quotes, TranspileConfig) = ParserChain.attempt(QueryScalarsParser(_)) + protected def traversableOperationParser(using Quotes, TranspileConfig) = + ParserChain.attempt(TraversableOperationParser(_)) + protected def patMatchParser(using Quotes, TranspileConfig) = ParserChain.attempt(CasePatMatchParser(_)) + protected def functionParser(using Quotes, TranspileConfig) = ParserChain.attempt(FunctionParser(_)) protected def functionApplyParser(using Quotes, TranspileConfig) = ParserChain.attempt(FunctionApplyParser(_)) - protected def valParser(using Quotes, TranspileConfig) = ParserChain.attempt(ValParser(_)) - protected def blockParser(using Quotes, TranspileConfig) = ParserChain.attempt(BlockParser(_)) - protected def extrasParser(using Quotes, TranspileConfig) = ParserChain.attempt(ExtrasParser(_)) - protected def operationsParser(using Quotes, TranspileConfig) = ParserChain.attempt(OperationsParser(_)) - protected def orderingParser(using Quotes, TranspileConfig) = ParserChain.attempt(OrderingParser(_)) - protected def 
genericExpressionsParser(using Quotes, TranspileConfig) = ParserChain.attempt(GenericExpressionsParser(_)) - protected def actionParser(using Quotes, TranspileConfig) = ParserChain.attempt(ActionParser(_)) - protected def batchActionParser(using Quotes, TranspileConfig) = ParserChain.attempt(BatchActionParser(_)) - protected def optionParser(using Quotes, TranspileConfig) = ParserChain.attempt(OptionParser(_)) - protected def ifElseParser(using Quotes, TranspileConfig) = ParserChain.attempt(IfElseParser(_)) + protected def valParser(using Quotes, TranspileConfig) = ParserChain.attempt(ValParser(_)) + protected def blockParser(using Quotes, TranspileConfig) = ParserChain.attempt(BlockParser(_)) + protected def extrasParser(using Quotes, TranspileConfig) = ParserChain.attempt(ExtrasParser(_)) + protected def operationsParser(using Quotes, TranspileConfig) = ParserChain.attempt(OperationsParser(_)) + protected def orderingParser(using Quotes, TranspileConfig) = ParserChain.attempt(OrderingParser(_)) + protected def genericExpressionsParser(using Quotes, TranspileConfig) = + ParserChain.attempt(GenericExpressionsParser(_)) + protected def actionParser(using Quotes, TranspileConfig) = ParserChain.attempt(ActionParser(_)) + protected def batchActionParser(using Quotes, TranspileConfig) = ParserChain.attempt(BatchActionParser(_)) + protected def optionParser(using Quotes, TranspileConfig) = ParserChain.attempt(OptionParser(_)) + protected def ifElseParser(using Quotes, TranspileConfig) = ParserChain.attempt(IfElseParser(_)) protected def complexValueParser(using Quotes, TranspileConfig) = ParserChain.attempt(ComplexValueParser(_)) - protected def valueParser(using Quotes, TranspileConfig) = ParserChain.attempt(ValueParser(_)) + protected def valueParser(using Quotes, TranspileConfig) = ParserChain.attempt(ValueParser(_)) // def userDefined(using quotesInput: Quotes) = Series(new Glosser[Ast] { // val quotes = quotesInput @@ -93,7 +103,9 @@ trait ParserLibrary extends ParserFactory { .orElse(extrasParser) .orElse(ifElseParser) .orElse(complexValueParser) // must go before functionApplyParser since valueParser parsers '.apply on case class' and the functionApply would take that - .orElse(functionApplyParser) // must go before genericExpressionsParser otherwise that will consume the 'apply' clauses + .orElse( + functionApplyParser + ) // must go before genericExpressionsParser otherwise that will consume the 'apply' clauses .orElse(genericExpressionsParser) .complete ParserLibrary.ReadyParser(assembly) @@ -116,9 +128,8 @@ class FunctionApplyParser(rootParse: Parser)(using Quotes, TranspileConfig) exte // c.fail("Anonymous classes aren't supported for function declaration anymore. Use a method with a type parameter instead. " + // "For instance, replace `val q = quote { new { def apply[T](q: Query[T]) = ... } }` by `def q[T] = quote { (q: Query[T] => ... }`") - def attempt = { - case Unseal(Apply(Select(term, "apply"), args)) => - FunctionApply(rootParse(term.asExpr), args.map(arg => rootParse(arg.asExpr))) + def attempt = { case Unseal(Apply(Select(term, "apply"), args)) => + FunctionApply(rootParse(term.asExpr), args.map(arg => rootParse(arg.asExpr))) } } @@ -131,17 +142,16 @@ class FunctionParser(rootParse: Parser)(using Quotes, TranspileConfig) extends P // c.fail("Anonymous classes aren't supported for function declaration anymore. Use a method with a type parameter instead. " + // "For instance, replace `val q = quote { new { def apply[T](q: Query[T]) = ... 
} }` by `def q[T] = quote { (q: Query[T] => ... }`") - def attempt = { - case Unseal(RawLambdaN(params, body)) => - val subtree = Function(params.map((name, tpe) => cleanIdent(name, tpe)), rootParse(body.asExpr)) - // If there are actions inside the subtree, we need to do some additional sanitizations - // of the variables so that their content will not collide with code that we have generated. - - // TODO Add back once moved to the quill subtree because AvoidAliasConflict is private[getquill] - // if (CollectAst.byType[Action](subtree).nonEmpty) - // AvoidAliasConflict.sanitizeVariables(subtree, dangerousVariables) - // else - subtree + def attempt = { case Unseal(RawLambdaN(params, body)) => + val subtree = Function(params.map((name, tpe) => cleanIdent(name, tpe)), rootParse(body.asExpr)) + // If there are actions inside the subtree, we need to do some additional sanitizations + // of the variables so that their content will not collide with code that we have generated. + + // TODO Add back once moved to the quill subtree because AvoidAliasConflict is private[getquill] + // if (CollectAst.byType[Action](subtree).nonEmpty) + // AvoidAliasConflict.sanitizeVariables(subtree, dangerousVariables) + // else + subtree } } @@ -149,8 +159,8 @@ class ValParser(val rootParse: Parser)(using Quotes, TranspileConfig) extends Parser(rootParse) with PatternMatchingValues { import quotes.reflect._ - def attempt = { - case Unseal(ValDefTerm(ast)) => ast + def attempt = { case Unseal(ValDefTerm(ast)) => + ast } } @@ -174,17 +184,18 @@ class BlockParser(val rootParse: Parser)(using Quotes, TranspileConfig) } } -class CasePatMatchParser(val rootParse: Parser)(using Quotes, TranspileConfig) extends Parser(rootParse) with PatternMatchingValues { +class CasePatMatchParser(val rootParse: Parser)(using Quotes, TranspileConfig) + extends Parser(rootParse) + with PatternMatchingValues { import quotes.reflect.{Constant => TConstant, _} - def attempt = { - case Unseal(PatMatchTerm(patMatch)) => - patMatch match { - case PatMatch.SimpleClause(ast) => ast - case PatMatch.MultiClause(clauses: List[PatMatchClause]) => nestedIfs(clauses) - case PatMatch.AutoAddedTrivialClause => - Constant(true, Quat.BooleanValue) - } + def attempt = { case Unseal(PatMatchTerm(patMatch)) => + patMatch match { + case PatMatch.SimpleClause(ast) => ast + case PatMatch.MultiClause(clauses: List[PatMatchClause]) => nestedIfs(clauses) + case PatMatch.AutoAddedTrivialClause => + Constant(true, Quat.BooleanValue) + } } def nestedIfs(clauses: List[PatMatchClause]): Ast = @@ -194,7 +205,10 @@ class CasePatMatchParser(val rootParse: Parser)(using Quotes, TranspileConfig) e } } -/** Same as traversableOperationParser, pre-filters that the result-type is a boolean */ +/** + * Same as traversableOperationParser, pre-filters that the result-type is a + * boolean + */ class TraversableOperationParser(val rootParse: Parser)(using Quotes, TranspileConfig) extends Parser(rootParse) with Parser.PrefilterType[Boolean] @@ -210,7 +224,9 @@ class TraversableOperationParser(val rootParse: Parser)(using Quotes, TranspileC } } -class OrderingParser(val rootParse: Parser)(using Quotes, TranspileConfig) extends Parser(rootParse) with PatternMatchingValues { +class OrderingParser(val rootParse: Parser)(using Quotes, TranspileConfig) + extends Parser(rootParse) + with PatternMatchingValues { import quotes.reflect._ def attempt: History ?=> PartialFunction[Expr[_], Ordering] = { @@ -219,7 +235,9 @@ class OrderingParser(val rootParse: Parser)(using Quotes, 
TranspileConfig) exten // Doing this on a lower level since there are multiple cases of Order.apply with multiple arguemnts case Unseal(Apply(TypeApply(Select(Ident("Ord"), "apply"), _), args)) => // parse all sub-orderings if this is a composite - val subOrderings = args.map(_.asExpr).map(ordExpression => attempt.lift(ordExpression).getOrElse(failParse(ordExpression, classOf[Ordering]))) + val subOrderings = args + .map(_.asExpr) + .map(ordExpression => attempt.lift(ordExpression).getOrElse(failParse(ordExpression, classOf[Ordering]))) TupleOrdering(subOrderings) case '{ Ord.asc[t] } => Asc @@ -241,7 +259,8 @@ class QuotationParser(rootParse: Parser)(using Quotes, TranspileConfig) extends quotationLot match { case Uprootable(uid, astTree, _) => Unlifter(astTree) case Pluckable(uid, astTree, _) => QuotationTag(uid) - case Pointable(quote) => report.throwError(s"Quotation is invalid for compile-time or processing: ${quote.show}", quote) + case Pointable(quote) => + report.throwError(s"Quotation is invalid for compile-time or processing: ${quote.show}", quote) } case PlanterExpr.UprootableUnquote(expr) => @@ -268,17 +287,27 @@ class ActionParser(val rootParse: Parser)(using Quotes, TranspileConfig) with PropertyParser { import quotes.reflect.{Constant => TConstant, _} - def combineAndCheckAndParse[T: Type, A <: Ast](first: Expr[T], others: Seq[Expr[T]])(checkClause: Expr[_] => Unit)(parseClause: Expr[_] => A): Seq[A] = { + def combineAndCheckAndParse[T: Type, A <: Ast](first: Expr[T], others: Seq[Expr[T]])( + checkClause: Expr[_] => Unit + )(parseClause: Expr[_] => A): Seq[A] = { val assignments = (first.asTerm +: others.map(_.asTerm).filterNot(isNil(_))) assignments.foreach(term => checkClause(term.asExpr)) assignments.map(term => parseClause(term.asExpr)) } def attempt = { - case '{ type t; ($query: EntityQueryModel[`t`]).insert(($first: `t` => (Any, Any)), (${ Varargs(others) }: Seq[`t` => (Any, Any)]): _*) } => + case '{ + type t; + ($query: EntityQueryModel[`t`]) + .insert(($first: `t` => (Any, Any)), (${ Varargs(others) }: Seq[`t` => (Any, Any)]): _*) + } => val assignments = combineAndCheckAndParse(first, others)(AssignmentTerm.CheckTypes(_))(AssignmentTerm.OrFail(_)) AInsert(rootParse(query), assignments.toList) - case '{ type t; ($query: EntityQueryModel[`t`]).update(($first: `t` => (Any, Any)), (${ Varargs(others) }: Seq[`t` => (Any, Any)]): _*) } => + case '{ + type t; + ($query: EntityQueryModel[`t`]) + .update(($first: `t` => (Any, Any)), (${ Varargs(others) }: Seq[`t` => (Any, Any)]): _*) + } => val assignments = combineAndCheckAndParse(first, others)(AssignmentTerm.CheckTypes(_))(AssignmentTerm.OrFail(_)) AUpdate(rootParse(query), assignments.toList) case '{ type t; ($query: EntityQueryModel[`t`]).delete } => @@ -302,7 +331,7 @@ class ActionParser(val rootParse: Parser)(using Quotes, TranspileConfig) report.throwError(s"A 'returning' clause must have arguments.") case '{ ($action: Insert[t]).returning[r](${ Lambda1(id, tpe, body) }) } => - val ident = cleanIdent(id, tpe) + val ident = cleanIdent(id, tpe) val bodyAst = reprocessReturnClause(ident, rootParse(body), action, Type.of[t]) // // TODO Verify that the idiom supports this type of returning clause // idiomReturnCapability match { @@ -316,14 +345,17 @@ class ActionParser(val rootParse: Parser)(using Quotes, TranspileConfig) // idiomReturnCapability.verifyAst(bodyAst) Returning(rootParse(action), ident, bodyAst) case '{ ($action: Insert[t]).returningMany[r](${ Lambda1(id, tpe, body) }) } => - val ident = cleanIdent(id, 
tpe) + val ident = cleanIdent(id, tpe) val bodyAst = reprocessReturnClause(ident, rootParse(body), action, Type.of[t]) Returning(rootParse(action), ident, bodyAst) case '{ ($action: Insert[t]).onConflictIgnore } => OnConflict(rootParse(action), OnConflict.NoTarget, OnConflict.Ignore) - case '{ type t; ($action: Insert[`t`]).onConflictIgnore(($target: `t` => Any), (${ Varargs(targets) }: Seq[`t` => Any]): _*) } => + case '{ + type t; + ($action: Insert[`t`]).onConflictIgnore(($target: `t` => Any), (${ Varargs(targets) }: Seq[`t` => Any]): _*) + } => val targetProperties = combineAndCheckAndParse(target, targets)(_ => ())(LambdaToProperty.OrFail(_)) OnConflict( rootParse(action), @@ -331,16 +363,30 @@ class ActionParser(val rootParse: Parser)(using Quotes, TranspileConfig) OnConflict.Ignore ) - case '{ type t; ($action: Insert[`t`]).onConflictUpdate(($assign: (`t`, `t`) => (Any, Any)), (${ Varargs(assigns) }: Seq[(`t`, `t`) => (Any, Any)]): _*) } => - val assignments = combineAndCheckAndParse(assign, assigns)(AssignmentTerm.CheckTypes(_))(AssignmentTerm.Double.OrFail(_)) + case '{ + type t; + ($action: Insert[`t`]).onConflictUpdate( + ($assign: (`t`, `t`) => (Any, Any)), + (${ Varargs(assigns) }: Seq[(`t`, `t`) => (Any, Any)]): _* + ) + } => + val assignments = + combineAndCheckAndParse(assign, assigns)(AssignmentTerm.CheckTypes(_))(AssignmentTerm.Double.OrFail(_)) OnConflict( rootParse(action), OnConflict.NoTarget, OnConflict.Update(assignments.toList) ) - case '{ type t; ($action: Insert[`t`]).onConflictUpdate(($target: `t` => Any), (${ Varargs(targets) }: Seq[`t` => Any]): _*)(($assign: (`t`, `t`) => (Any, Any)), (${ Varargs(assigns) }: Seq[(`t`, `t`) => (Any, Any)]): _*) } => - val assignments = combineAndCheckAndParse(assign, assigns)(AssignmentTerm.CheckTypes(_))(AssignmentTerm.Double.OrFail(_)) + case '{ + type t; + ($action: Insert[`t`]).onConflictUpdate(($target: `t` => Any), (${ Varargs(targets) }: Seq[`t` => Any]): _*)( + ($assign: (`t`, `t`) => (Any, Any)), + (${ Varargs(assigns) }: Seq[(`t`, `t`) => (Any, Any)]): _* + ) + } => + val assignments = + combineAndCheckAndParse(assign, assigns)(AssignmentTerm.CheckTypes(_))(AssignmentTerm.Double.OrFail(_)) val targetProperties = combineAndCheckAndParse(target, targets)(_ => ())(LambdaToProperty.OrFail(_)) OnConflict( rootParse(action), @@ -350,25 +396,25 @@ class ActionParser(val rootParse: Parser)(using Quotes, TranspileConfig) // Need to make copies because presently `Action` does not have a .returning method case '{ ($action: Update[t]).returning[r](${ Lambda1(id, tpe, body) }) } => - val ident = cleanIdent(id, tpe) + val ident = cleanIdent(id, tpe) val bodyAst = reprocessReturnClause(ident, rootParse(body), action, Type.of[t]) Returning(rootParse(action), ident, bodyAst) case '{ ($action: Update[t]).returningMany[r](${ Lambda1(id, tpe, body) }) } => - val ident = cleanIdent(id, tpe) + val ident = cleanIdent(id, tpe) val bodyAst = reprocessReturnClause(ident, rootParse(body), action, Type.of[t]) Returning(rootParse(action), ident, bodyAst) case '{ ($action: Delete[t]).returning[r](${ Lambda1(id, tpe, body) }) } => - val ident = cleanIdent(id, tpe) + val ident = cleanIdent(id, tpe) val bodyAst = reprocessReturnClause(ident, rootParse(body), action, Type.of[t]) Returning(rootParse(action), ident, bodyAst) case '{ ($action: Delete[t]).returningMany[r](${ Lambda1(id, tpe, body) }) } => - val ident = cleanIdent(id, tpe) + val ident = cleanIdent(id, tpe) val bodyAst = reprocessReturnClause(ident, rootParse(body), action, Type.of[t]) 
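All of these cases funnel through `reprocessReturnClause`, documented a little further down; as a self-contained sketch of the rewrite it performs (hypothetical types, not part of this patch):

// Whole-record clauses (`.returning(r => r)`) are expanded into explicit
// field lists so the emitted SQL never needs `RETURNING *`.
sealed trait ReturnClause
case class WholeRecord(alias: String) extends ReturnClause
case class RecordFields(alias: String, names: List[String]) extends ReturnClause

def expandWholeRecord(clause: ReturnClause, fieldNames: List[String]): ReturnClause =
  clause match
    case WholeRecord(alias) => RecordFields(alias, fieldNames) // r => r becomes r => (r.f1, r.f2, ...)
    case explicit => explicit // explicit clauses pass through unchanged

@main def returningDemo(): Unit =
  // Prints RecordFields(r,List(name, age)): the record was expanded.
  println(expandWholeRecord(WholeRecord("r"), List("name", "age")))

Expanding the fields keeps the result-set column order deterministic, which is exactly the problem the scaladoc below describes.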
Returning(rootParse(action), ident, bodyAst) case '{ ($action: Insert[t]).returningGenerated[r](${ Lambda1(id, tpe, body) }) } => - val ident = cleanIdent(id, tpe) + val ident = cleanIdent(id, tpe) val bodyAst = reprocessReturnClause(ident, rootParse(body), action, Type.of[t]) // // TODO Verify that the idiom supports this type of returning clause // idiomReturnCapability match { @@ -390,25 +436,32 @@ class ActionParser(val rootParse: Parser)(using Quotes, TranspileConfig) import io.getquill.generic.ElaborateStructure /** - * Note. Ported from reprocessReturnClause in Scala2-Quill parsing. Logic is much simpler in Scala 3 - * and potentially can be simplified even further. - * In situations where the a `.returning` clause returns the initial record i.e. `.returning(r => r)`, - * we need to expand out the record into it's fields i.e. `.returning(r => (r.foo, r.bar))` - * otherwise the tokenizer would be force to pass `RETURNING *` to the SQL which is a problem - * because the fields inside of the record could arrive out of order in the result set - * (e.g. arrive as `r.bar, r.foo`). Use use the value/flatten methods in order to expand - * the case-class out into fields. + * Note. Ported from reprocessReturnClause in Scala2-Quill parsing. Logic is + * much simpler in Scala 3 and potentially can be simplified even further. In + * situations where a `.returning` clause returns the initial record i.e. + * `.returning(r => r)`, we need to expand out the record into its fields + * i.e. `.returning(r => (r.foo, r.bar))` otherwise the tokenizer would be + * forced to pass `RETURNING *` to the SQL which is a problem because the + * fields inside of the record could arrive out of order in the result set + * (e.g. arrive as `r.bar, r.foo`). We use the value/flatten methods in order + * to expand the case-class out into fields. */ private def reprocessReturnClause(ident: AIdent, originalBody: Ast, action: Expr[_], actionType: Type[_]) = (ident == originalBody, action.asTerm.tpe) match { case (true, IsActionType()) => val newBody = actionType match { - case '[at] => ElaborateStructure.ofAribtraryType[at](ident.name, ElaborationSide.Decoding) // elaboration side is Decoding since this is for entity being returned from the Quill query + case '[at] => + ElaborateStructure.ofAribtraryType[at]( + ident.name, + ElaborationSide.Decoding + ) // elaboration side is Decoding since this is for the entity being returned from the Quill query } newBody case (true, _) => - report.throwError("Could not process whole-record 'returning' clause. Consider trying to return individual columns.") + report.throwError( + "Could not process whole-record 'returning' clause. Consider trying to return individual columns." + ) case _ => originalBody } @@ -429,7 +482,9 @@ class BatchActionParser(val rootParse: Parser)(using Quotes, TranspileConfig) import quotes.reflect.{Constant => TConstant, _} def attempt = { - case '{ type a <: Action[_] with QAC[_, _]; ($q: Query[t]).foreach[`a`, b](${ Lambda1(ident, tpe, body) })($unq) } => + case '{ + type a <: Action[_] with QAC[_, _]; ($q: Query[t]).foreach[`a`, b](${ Lambda1(ident, tpe, body) })($unq) + } => val id = cleanIdent(ident, tpe) Foreach(rootParse(q), id, rootParse(body)) } @@ -461,8 +516,9 @@ class OptionParser(rootParse: Parser)(using Quotes, TranspileConfig) extends Par } /** - * Note: The -->, -@> etc.. clauses are just to optimize the match by doing an early-exit if possible. - * they don't actaully do any application-relevant logic + * Note: The -->, -@> etc. 
clauses are just to optimize the match by doing an + * early-exit if possible. They don't actually do any application-relevant + * logic. */ def attempt = { case "isEmpty" --> '{ ($o: Option[t]).isEmpty } => @@ -535,9 +591,9 @@ class QueryParser(val rootParse: Parser)(using Quotes, TranspileConfig) def attempt = { case expr @ '{ type t; EntityQuery.apply[`t`] } => - val tpe = TypeRepr.of[t] + val tpe = TypeRepr.of[t] val name: String = tpe.classSymbol.get.name - val quat = InferQuat.ofType(tpe).probit + val quat = InferQuat.ofType(tpe).probit warnVerifyNoBranches(VerifyNoBranches.in(quat), expr) Entity(name, List(), quat) @@ -558,20 +614,30 @@ class QueryParser(val rootParse: Parser)(using Quotes, TranspileConfig) case "withFilter" -@> '{ ($q: Query[qt]).withFilter(${ Lambda1(ident, tpe, body) }) } => Filter(rootParse(q), cleanIdent(ident, tpe), rootParse(body)) - case "concatMap" -@@> '{ type t1; type t2; ($q: Query[qt]).concatMap[`t1`, `t2`](${ Lambda1(ident, tpe, body) })($unknown_stuff) } => // ask Alex why is concatMap like this? what's unkonwn_stuff? + case "concatMap" -@@> '{ + type t1; type t2; ($q: Query[qt]).concatMap[`t1`, `t2`](${ Lambda1(ident, tpe, body) })($unknown_stuff) + } => // ask Alex why is concatMap like this? what's unknown_stuff? ConcatMap(rootParse(q), cleanIdent(ident, tpe), rootParse(body)) case "union" -@> '{ ($a: Query[t]).union($b) } => Union(rootParse(a), rootParse(b)) case "unionAll" -@> '{ ($a: Query[t]).unionAll($b) } => UnionAll(rootParse(a), rootParse(b)) case "++" -@> '{ ($a: Query[t]).++($b) } => UnionAll(rootParse(a), rootParse(b)) - case ("join" -@> '{ type t1; type t2; ($q1: Query[`t1`]).join[`t1`, `t2`](($q2: Query[`t2`])) }) withOnClause(OnClause(ident1, tpe1, ident2, tpe2, on)) => + case ("join" -@> '{ + type t1; type t2; ($q1: Query[`t1`]).join[`t1`, `t2`](($q2: Query[`t2`])) + }) withOnClause (OnClause(ident1, tpe1, ident2, tpe2, on)) => Join(InnerJoin, rootParse(q1), rootParse(q2), cleanIdent(ident1, tpe1), cleanIdent(ident2, tpe2), rootParse(on)) - case ("leftJoin" -@> '{ type t1; type t2; ($q1: Query[`t1`]).leftJoin[`t1`, `t2`](($q2: Query[`t2`])) }) withOnClause(OnClause(ident1, tpe1, ident2, tpe2, on)) => + case ("leftJoin" -@> '{ + type t1; type t2; ($q1: Query[`t1`]).leftJoin[`t1`, `t2`](($q2: Query[`t2`])) + }) withOnClause (OnClause(ident1, tpe1, ident2, tpe2, on)) => Join(LeftJoin, rootParse(q1), rootParse(q2), cleanIdent(ident1, tpe1), cleanIdent(ident2, tpe2), rootParse(on)) - case ("rightJoin" -@> '{ type t1; type t2; ($q1: Query[`t1`]).rightJoin[`t1`, `t2`](($q2: Query[`t2`])) }) withOnClause(OnClause(ident1, tpe1, ident2, tpe2, on)) => + case ("rightJoin" -@> '{ + type t1; type t2; ($q1: Query[`t1`]).rightJoin[`t1`, `t2`](($q2: Query[`t2`])) + }) withOnClause (OnClause(ident1, tpe1, ident2, tpe2, on)) => Join(RightJoin, rootParse(q1), rootParse(q2), cleanIdent(ident1, tpe1), cleanIdent(ident2, tpe2), rootParse(on)) - case ("fullJoin" -@> '{ type t1; type t2; ($q1: Query[`t1`]).fullJoin[`t1`, `t2`](($q2: Query[`t2`])) }) withOnClause(OnClause(ident1, tpe1, ident2, tpe2, on)) => + case ("fullJoin" -@> '{ + type t1; type t2; ($q1: Query[`t1`]).fullJoin[`t1`, `t2`](($q2: Query[`t2`])) + }) withOnClause (OnClause(ident1, tpe1, ident2, tpe2, on)) => Join(FullJoin, rootParse(q1), rootParse(q2), cleanIdent(ident1, tpe1), cleanIdent(ident2, tpe2), rootParse(on)) case "join" -@> '{ type t1; ($q1: Query[`t1`]).join[`t1`](${ Lambda1(ident1, tpe, on) }) } => @@ -589,8 +655,16 @@ class QueryParser(val rootParse: Parser)(using Quotes, 
TranspileConfig) case "groupBy" -@> '{ type r; ($q: Query[t]).groupBy[`r`](${ Lambda1(ident1, tpe, body) }) } => GroupBy(rootParse(q), cleanIdent(ident1, tpe), rootParse(body)) - case "groupByMap" -@@> '{ ($q: Query[t]).groupByMap[g, r](${ Lambda1(byIdent, byTpe, byBody) })(${ Lambda1(mapIdent, mapTpe, mapBody) }) } => - GroupByMap(rootParse(q), cleanIdent(byIdent, byTpe), rootParse(byBody), cleanIdent(mapIdent, mapTpe), rootParse(mapBody)) + case "groupByMap" -@@> '{ + ($q: Query[t]).groupByMap[g, r](${ Lambda1(byIdent, byTpe, byBody) })(${ Lambda1(mapIdent, mapTpe, mapBody) }) + } => + GroupByMap( + rootParse(q), + cleanIdent(byIdent, byTpe), + rootParse(byBody), + cleanIdent(mapIdent, mapTpe), + rootParse(mapBody) + ) case "distinctOn" -@> '{ ($q: Query[t]).distinctOn[r](${ Lambda1(ident, tpe, body) }) } => rootParse(q) match { @@ -614,16 +688,22 @@ class QueryParser(val rootParse: Parser)(using Quotes, TranspileConfig) def failFlatJoin(clauseName: String) = report.throwError( s""" - |The .${clauseName} cannot be placed after a join clause in a for-comprehension. Put it before. - |For example. Change: - | for { a <- query[A]; b <- query[B].join(...).nested } to: - | for { a <- query[A]; b <- query[B].nested.join(...) } - |""".stripMargin + |The .${clauseName} cannot be placed after a join clause in a for-comprehension. Put it before. + |For example. Change: + | for { a <- query[A]; b <- query[B].join(...).nested } to: + | for { a <- query[A]; b <- query[B].nested.join(...) } + |""".stripMargin ) import io.getquill.JoinQuery - private case class OnClause(ident1: String, tpe1: quotes.reflect.TypeRepr, ident2: String, tpe2: quotes.reflect.TypeRepr, on: quoted.Expr[_]) + private case class OnClause( + ident1: String, + tpe1: quotes.reflect.TypeRepr, + ident2: String, + tpe2: quotes.reflect.TypeRepr, + on: quoted.Expr[_] + ) private object withOnClause { def unapply(jq: Expr[_]) = jq match { @@ -656,27 +736,31 @@ class SetOperationsParser(val rootParse: Parser)(using Quotes, TranspileConfig) } // end SetOperationsParser /** - * Since QueryParser only matches things that output Query[_], make a separate parser that - * parses things like query.sum, query.size etc... when needed. + * Since QueryParser only matches things that output Query[_], make a separate + * parser that parses things like query.sum, query.size etc... when needed. 
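For reference, these are the user-facing shapes QueryScalarsParser (below) matches, sketched against a mirror context (illustrative only; `Person` and the context setup are assumptions, not part of this patch):

import io.getquill.*

object QueryScalarsDemo:
  case class Person(name: String, age: Int)

  // A mirror context compiles the queries without a live database.
  val ctx = new SqlMirrorContext(PostgresDialect, Literal)
  import ctx.*

  // Each of these parses to an Aggregation node (min/max/avg/sum/size):
  inline def maxAge = quote(query[Person].map(_.age).max)
  inline def avgAge = quote(query[Person].map(_.age).avg)
  inline def totalAge = quote(query[Person].map(_.age).sum)
  inline def howMany = quote(query[Person].size)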
*/ class QueryScalarsParser(val rootParse: Parser)(using Quotes) extends Parser(rootParse) with PropertyAliases { import quotes.reflect.{Constant => TConstant, Ident => TIdent, _} def attempt = { - case '{ type t; type u >: `t`; ($q: Query[`t`]).value[`u`] } => rootParse(q) - case '{ type t; type u >: `t`; ($q: Query[`t`]).min[`u`] } => Aggregation(AggregationOperator.`min`, rootParse(q)) - case '{ type t; type u >: `t`; ($q: Query[`t`]).max[`u`] } => Aggregation(AggregationOperator.`max`, rootParse(q)) - case '{ type t; type u >: `t`; ($q: Query[`t`]).avg[`u`]($n) } => Aggregation(AggregationOperator.`avg`, rootParse(q)) - case '{ type t; type u >: `t`; ($q: Query[`t`]).sum[`u`]($n) } => Aggregation(AggregationOperator.`sum`, rootParse(q)) - case '{ type t; ($q: Query[`t`]).size } => Aggregation(AggregationOperator.`size`, rootParse(q)) - - case '{ type t; type u >: `t`; min[`u`]($q) } => Aggregation(AggregationOperator.`min`, rootParse(q)) - case '{ type t; type u >: `t`; max[`u`]($q) } => Aggregation(AggregationOperator.`max`, rootParse(q)) - case '{ type t; type u >: `t`; count[`u`]($q) } => Aggregation(AggregationOperator.`size`, rootParse(q)) - case '{ type t; type u >: `t`; avg[`u`]($q: Option[`u`])($n) } => Aggregation(AggregationOperator.`avg`, rootParse(q)) - case '{ type t; type u >: `t`; sum[`u`]($q: Option[`u`])($n) } => Aggregation(AggregationOperator.`sum`, rootParse(q)) - case '{ type t; type u >: `t`; avg[`u`]($q: `u`)($n) } => Aggregation(AggregationOperator.`avg`, rootParse(q)) - case '{ type t; type u >: `t`; sum[`u`]($q: `u`)($n) } => Aggregation(AggregationOperator.`sum`, rootParse(q)) + case '{ type t; type u >: `t`; ($q: Query[`t`]).value[`u`] } => rootParse(q) + case '{ type t; type u >: `t`; ($q: Query[`t`]).min[`u`] } => Aggregation(AggregationOperator.`min`, rootParse(q)) + case '{ type t; type u >: `t`; ($q: Query[`t`]).max[`u`] } => Aggregation(AggregationOperator.`max`, rootParse(q)) + case '{ type t; type u >: `t`; ($q: Query[`t`]).avg[`u`]($n) } => + Aggregation(AggregationOperator.`avg`, rootParse(q)) + case '{ type t; type u >: `t`; ($q: Query[`t`]).sum[`u`]($n) } => + Aggregation(AggregationOperator.`sum`, rootParse(q)) + case '{ type t; ($q: Query[`t`]).size } => Aggregation(AggregationOperator.`size`, rootParse(q)) + + case '{ type t; type u >: `t`; min[`u`]($q) } => Aggregation(AggregationOperator.`min`, rootParse(q)) + case '{ type t; type u >: `t`; max[`u`]($q) } => Aggregation(AggregationOperator.`max`, rootParse(q)) + case '{ type t; type u >: `t`; count[`u`]($q) } => Aggregation(AggregationOperator.`size`, rootParse(q)) + case '{ type t; type u >: `t`; avg[`u`]($q: Option[`u`])($n) } => + Aggregation(AggregationOperator.`avg`, rootParse(q)) + case '{ type t; type u >: `t`; sum[`u`]($q: Option[`u`])($n) } => + Aggregation(AggregationOperator.`sum`, rootParse(q)) + case '{ type t; type u >: `t`; avg[`u`]($q: `u`)($n) } => Aggregation(AggregationOperator.`avg`, rootParse(q)) + case '{ type t; type u >: `t`; sum[`u`]($q: `u`)($n) } => Aggregation(AggregationOperator.`sum`, rootParse(q)) } } @@ -695,7 +779,7 @@ class InfixParser(val rootParse: Parser)(using Quotes, TranspileConfig) extends } def genericInfix(i: Expr[_])(isPure: Boolean, isTransparent: Boolean, quat: Quat)(using History) = { - val (parts, paramsExprs) = InfixComponents.unapply(i).getOrElse { failParse(i, classOf[Infix]) } + val (parts, paramsExprs) = InfixComponents.unapply(i).getOrElse(failParse(i, classOf[Infix])) if (parts.exists(_.endsWith("#"))) { PrepareDynamicInfix(parts.toList, 
paramsExprs.toList)(isPure, isTransparent, quat) } else { @@ -746,7 +830,9 @@ class InfixParser(val rootParse: Parser)(using Quotes, TranspileConfig) extends case Unseal(TApply(InlineGenericIdent(), List(value))) => unapply(value.asExpr) case InterpolatorClause(partsExpr, params) => - val parts = StringContextExpr.unapply(partsExpr).getOrElse { failParse(partsExpr, "Cannot parse a valid StringContext") } + val parts = StringContextExpr.unapply(partsExpr).getOrElse { + failParse(partsExpr, "Cannot parse a valid StringContext") + } Some((parts, params)) case _ => None @@ -754,7 +840,9 @@ class InfixParser(val rootParse: Parser)(using Quotes, TranspileConfig) extends } // end InfixComponents private object PrepareDynamicInfix { - def apply(parts: List[String], params: List[Expr[Any]])(isPure: Boolean, isTransparent: Boolean, quat: Quat)(using History): Dynamic = { + def apply(parts: List[String], params: List[Expr[Any]])(isPure: Boolean, isTransparent: Boolean, quat: Quat)(using + History + ): Dynamic = { // Basically the way it works is like this // // sql"foo#${bar}baz" becomes: @@ -784,54 +872,66 @@ class InfixParser(val rootParse: Parser)(using Quotes, TranspileConfig) extends } val fused = - (elements.foldLeft(List.empty[InfixElement]) { - // Shorthand: - // fus: fusion, add: addition - // acc: accum, ele: element - // LE: List.empty (a.k.a Nil) - // Part("foo") is really Part('{"foo"}) or Part(Expr("foo")) - // (m1) is the 1st match clause below, (m2) is the 2nd one - - // Here are two examples of thos this works - // - // List( Part('{"foo"}), Part('{"bar"}), Part('{"baz"}) ) => - // (0 ) LE - remaining:List( Part('{"bar"}), Part('{"baz"}) ) => - // (m2) elm:Part("foo") :: acc:LE - remaining:List( Part('{"bar"}), Part('{"baz"}) ) => - // (m1) fus:Part(fusion:"foo" + add:"bar") :: LE - remaining:List( Part('{"baz"}) ) => - // (m1) fus:Part(fusion:"foo" + "bar" + add:"baz")) :: LE - remaining:List() => - // - // List( Part("foo"), Param(bar), Part("baz") ) => - // (0 ) LE - remaining:List( Part('{"foo"}), Part('{"bar"}), Part('{"baz"}), Part('{"blin"}) ) => - // (m2) elm:Part("foo") :: acc:LE - remaining:List( Param(bar), Part('{"baz"}), Part('{"blin"}) ) => - // (m2) elm:Param(bar) :: acc:(Part('{"foo"})) :: LE) - remaining:List( Part('{"baz"}), Part('{"blin"}) ) => - // (m2) elm:Part("baz") :: acc:(Param(bar) :: Part("foo")) :: LE) - remaining:List( Part('{"blin"}) ) => - // (m1) fus:Part("baz") :: acc:(Part("blin") }) :: Param(bar) :: Part("foo") :: LE - remaining: List() - // - // Note that once the process is done the elements are reversed - - // (m1) - case (Part(fusion) :: tail, Part(addition)) => - Part('{ $fusion + $addition }) :: tail - // (m2) - case (accum, element) => - element :: accum - }).reverse + (elements + .foldLeft(List.empty[InfixElement]) { + // Shorthand: + // fus: fusion, add: addition + // acc: accum, ele: element + // LE: List.empty (a.k.a Nil) + // Part("foo") is really Part('{"foo"}) or Part(Expr("foo")) + // (m1) is the 1st match clause below, (m2) is the 2nd one + + // Here are two examples of how this works + // + // List( Part('{"foo"}), Part('{"bar"}), Part('{"baz"}) ) => + // (0 ) LE - remaining:List( Part('{"bar"}), Part('{"baz"}) ) => + // (m2) elm:Part("foo") :: acc:LE - remaining:List( Part('{"bar"}), Part('{"baz"}) ) => + // (m1) fus:Part(fusion:"foo" + add:"bar") :: LE - remaining:List( Part('{"baz"}) ) => + // (m1) fus:Part(fusion:"foo" + "bar" + add:"baz")) :: LE - remaining:List() => + // + // List( Part("foo"), Param(bar), Part("baz") ) => +
// (0 ) LE - remaining:List( Part('{"foo"}), Part('{"bar"}), Part('{"baz"}), Part('{"blin"}) ) => + // (m2) elm:Part("foo") :: acc:LE - remaining:List( Param(bar), Part('{"baz"}), Part('{"blin"}) ) => + // (m2) elm:Param(bar) :: acc:(Part('{"foo"})) :: LE) - remaining:List( Part('{"baz"}), Part('{"blin"}) ) => + // (m2) elm:Part("baz") :: acc:(Param(bar) :: Part("foo")) :: LE) - remaining:List( Part('{"blin"}) ) => + // (m1) fus:Part("baz") :: acc:(Part("blin") }) :: Param(bar) :: Part("foo") :: LE - remaining: List() + // + // Note that once the process is done the elements are reversed + + // (m1) + case (Part(fusion) :: tail, Part(addition)) => + Part('{ $fusion + $addition }) :: tail + // (m2) + case (accum, element) => + element :: accum + }) + .reverse val newParts = - fused.collect { - case Part(v) => v + fused.collect { case Part(v) => + v } val newParams = - fused.collect { - case Param(v) => Lifter(rootParse(v)) + fused.collect { case Param(v) => + Lifter(rootParse(v)) } // If there is a lift that one of the static parts has, the lift should be extracted anyway // from the outer quote. Have a look at the "with lift" test in InfixText.scala for more detail Dynamic( '{ - Quoted(Infix(${ Expr.ofList(newParts) }, ${ Expr.ofList(newParams) }, ${ Expr(isPure) }, ${ Expr(isTransparent) }, ${ Lifter.quat(quat) }), Nil, Nil) + Quoted( + Infix( + ${ Expr.ofList(newParts) }, + ${ Expr.ofList(newParams) }, + ${ Expr(isPure) }, + ${ Expr(isTransparent) }, + ${ Lifter.quat(quat) } + ), + Nil, + Nil + ) }, quat ) @@ -845,7 +945,9 @@ class InfixParser(val rootParse: Parser)(using Quotes, TranspileConfig) extends } // end InfixParser -class ExtrasParser(val rootParse: Parser)(using Quotes, TranspileConfig) extends Parser(rootParse) with ComparisonTechniques { +class ExtrasParser(val rootParse: Parser)(using Quotes, TranspileConfig) + extends Parser(rootParse) + with ComparisonTechniques { import quotes.reflect._ private object ExtrasModule { @@ -877,7 +979,10 @@ class ExtrasParser(val rootParse: Parser)(using Quotes, TranspileConfig) extends } } -class OperationsParser(val rootParse: Parser)(using Quotes, TranspileConfig) extends Parser(rootParse) with ComparisonTechniques with QuatMaking { +class OperationsParser(val rootParse: Parser)(using Quotes, TranspileConfig) + extends Parser(rootParse) + with ComparisonTechniques + with QuatMaking { import quotes.reflect._ import io.getquill.ast.Infix // Note that if we import Dsl._ here then the "like" construct @@ -942,7 +1047,8 @@ class OperationsParser(val rootParse: Parser)(using Quotes, TranspileConfig) ext // Unary minus symbol i.e. val num: Int = 4; quote { -lift(num) }. // This is done term-level or we would have to do it for every numeric type - case Unseal(Select(num, "unary_-")) if isNumeric(num.tpe) => UnaryOperation(NumericOperator.-, rootParse(num.asExpr)) + case Unseal(Select(num, "unary_-")) if isNumeric(num.tpe) => + UnaryOperation(NumericOperator.-, rootParse(num.asExpr)) // In the future a casting system should be implemented to handle these cases. // Until then, let the SQL dialect take care of the auto-conversion. 
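The part-fusion fold in PrepareDynamicInfix above can also be exercised outside the macro context. A minimal standalone sketch (plain Strings standing in for Expr[String]; the names here are illustrative, not from the patch):

sealed trait InfixElement
final case class Part(s: String) extends InfixElement
final case class Param(name: String) extends InfixElement

// Adjacent static parts fuse into one; params break the fusion, matching
// the (m1)/(m2) clauses documented in the comments above.
def fuse(elements: List[InfixElement]): List[InfixElement] =
  elements.foldLeft(List.empty[InfixElement]) {
    // (m1) an incoming static part fuses with the static part at the accumulator's head
    case (Part(fusion) :: tail, Part(addition)) => Part(fusion + addition) :: tail
    // (m2) anything else is simply prepended
    case (accum, element) => element :: accum
  }.reverse

// fuse(List(Part("foo"), Part("bar"), Part("baz")))  == List(Part("foobarbaz"))
// fuse(List(Part("foo"), Param("bar"), Part("baz"))) == List(Part("foo"), Param("bar"), Part("baz"))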
@@ -969,13 +1075,15 @@ class OperationsParser(val rootParse: Parser)(using Quotes, TranspileConfig) ext case NamedOp1(left, "+", right) if is[String](left) || is[String](right) => BinaryOperation(rootParse(left), StringOperator.+, rootParse(right)) - case '{ ($i: String).toString } => rootParse(i) - case '{ ($str: String).toUpperCase } => UnaryOperation(StringOperator.toUpperCase, rootParse(str)) - case '{ ($str: String).toLowerCase } => UnaryOperation(StringOperator.toLowerCase, rootParse(str)) - case '{ ($str: String).toLong } => UnaryOperation(StringOperator.toLong, rootParse(str)) - case '{ ($str: String).toInt } => UnaryOperation(StringOperator.toInt, rootParse(str)) - case '{ ($left: String).startsWith($right) } => BinaryOperation(rootParse(left), StringOperator.startsWith, rootParse(right)) - case '{ ($left: String).split($right: String) } => BinaryOperation(rootParse(left), StringOperator.split, rootParse(right)) + case '{ ($i: String).toString } => rootParse(i) + case '{ ($str: String).toUpperCase } => UnaryOperation(StringOperator.toUpperCase, rootParse(str)) + case '{ ($str: String).toLowerCase } => UnaryOperation(StringOperator.toLowerCase, rootParse(str)) + case '{ ($str: String).toLong } => UnaryOperation(StringOperator.toLong, rootParse(str)) + case '{ ($str: String).toInt } => UnaryOperation(StringOperator.toInt, rootParse(str)) + case '{ ($left: String).startsWith($right) } => + BinaryOperation(rootParse(left), StringOperator.startsWith, rootParse(right)) + case '{ ($left: String).split($right: String) } => + BinaryOperation(rootParse(left), StringOperator.split, rootParse(right)) // 1 + 1 // Apply(Select(Lit(1), +), Lit(1)) @@ -993,7 +1101,8 @@ class OperationsParser(val rootParse: Parser)(using Quotes, TranspileConfig) ext object NumericOperation { def unapply(expr: Expr[_])(using History): Option[BinaryOperation] = UntypeExpr(expr) match { - case NamedOp1(left, NumericOpLabel(binaryOp), right) if (isNumeric(left.asTerm.tpe) && isNumeric(right.asTerm.tpe)) => + case NamedOp1(left, NumericOpLabel(binaryOp), right) + if (isNumeric(left.asTerm.tpe) && isNumeric(right.asTerm.tpe)) => Some(BinaryOperation(rootParse(left), binaryOp, rootParse(right))) case _ => None } @@ -1017,13 +1126,12 @@ class OperationsParser(val rootParse: Parser)(using Quotes, TranspileConfig) ext } /** - * Should check that something is a null-constant basically before anything else because - * null-constant can match anything e.g. a (something: SomeValue) clause. Found this out - * when tried to do just '{ (infix: InfixValue) } and 'null' matched it + * Should check that something is a null-constant basically before anything else + * because null-constant can match anything e.g. a (something: SomeValue) + * clause. 
Found this out when I tried to do just '{ (infix: InfixValue) } and + 'null' matched it */ -class ValueParser(rootParse: Parser)(using Quotes, TranspileConfig) - extends Parser(rootParse) - with QuatMaking { +class ValueParser(rootParse: Parser)(using Quotes, TranspileConfig) extends Parser(rootParse) with QuatMaking { import quotes.reflect.{Constant => TConstant, Ident => TIdent, _} def attempt = { @@ -1059,7 +1167,10 @@ class ComplexValueParser(rootParse: Parser)(using Quotes, TranspileConfig) case CaseClassCreation(ccName, fields, args) => if (fields.length != args.length) - throw new IllegalArgumentException(s"In Case Class ${ccName}, does not have the same number of fields (${fields.length}) as it does arguments ${args.length} (fields: ${fields}, args: ${args.map(_.show)})") + throw new IllegalArgumentException( + s"Case Class ${ccName} does not have the same number of fields (${fields.length}) as arguments (${args.length}) (fields: ${fields}, args: ${args + .map(_.show)})" + ) val argsAst = args.map(rootParse(_)) CaseClass(ccName, fields.zip(argsAst)) @@ -1068,7 +1179,9 @@ class ComplexValueParser(rootParse: Parser)(using Quotes, TranspileConfig) } } -class GenericExpressionsParser(val rootParse: Parser)(using Quotes, TranspileConfig) extends Parser(rootParse) with PropertyParser { +class GenericExpressionsParser(val rootParse: Parser)(using Quotes, TranspileConfig) + extends Parser(rootParse) + with PropertyParser { import quotes.reflect.{Constant => TConstant, Ident => TIdent, Apply => TApply, _} import reflect.Selectable.reflectiveSelectable @@ -1080,7 +1193,10 @@ class GenericExpressionsParser(val rootParse: Parser)(using Quotes, TranspileCon val propName = propNameExpr match { case Expr(v) => v - case _ => report.throwError(s"Cannot parse the property ${Format.Expr(propNameExpr)}. It was not a static string property.") + case _ => + report.throwError( + s"Cannot parse the property ${Format.Expr(propNameExpr)}. It was not a static string property." + ) } Property(rootParse(v), propName) diff --git a/quill-sql/src/main/scala/io/getquill/parser/ParserHelpers.scala b/quill-sql/src/main/scala/io/getquill/parser/ParserHelpers.scala index b9a69d388..a612ee183 100644 --- a/quill-sql/src/main/scala/io/getquill/parser/ParserHelpers.scala +++ b/quill-sql/src/main/scala/io/getquill/parser/ParserHelpers.scala @@ -58,8 +58,9 @@ object ParserHelpers { object TwoComponents { def unapply(expr: Expr[_])(using Quotes) = UntypeExpr(expr) match { - case Lambda2(ident1, identTpe1, ident2, identTpe2, ArrowFunction(prop, value)) => Some((ident1, identTpe1, ident2, identTpe2, prop, value)) - case _ => None + case Lambda2(ident1, identTpe1, ident2, identTpe2, ArrowFunction(prop, value)) => + Some((ident1, identTpe1, ident2, identTpe2, prop, value)) + case _ => None } } @@ -67,13 +68,14 @@ object ParserHelpers { def checkPropAndValue(parent: Expr[Any], prop: Expr[Any], value: Expr[Any])(using Quotes) = { import quotes.reflect._ val valueTpe = value.asTerm.tpe.widen - val propTpe = prop.asTerm.tpe.widen + val propTpe = prop.asTerm.tpe.widen // If both numbers are numeric and primitive e.g. `_.age -> 22.toShort` (in: `query[Person].insert(_.age -> 22.toShort)`) // then check if one can fit into another.
If it can the assignment is valid if (isNumericPrimitive(propTpe) && isNumericPrimitive(valueTpe)) { if (!(numericPrimitiveFitsInto(propTpe, valueTpe))) { report.throwError( - s"The primitive numeric value ${Format.TypeRepr(valueTpe)} in ${Format.Expr(value)} is to large to fit into the ${Format.TypeRepr(propTpe)} in ${Format.Expr(prop)}.", + s"The primitive numeric value ${Format.TypeRepr(valueTpe)} in ${Format + .Expr(value)} is too large to fit into the ${Format.TypeRepr(propTpe)} in ${Format.Expr(prop)}.", parent ) } @@ -81,7 +83,8 @@ object ParserHelpers { // Otherwise check if the property is a subtype of the value that is being assigned to it else if (!(valueTpe <:< propTpe)) { report.throwError( - s"The ${Format.TypeRepr(valueTpe)} value ${Format.Expr(value)} cannot be assigned to the ${Format.TypeRepr(propTpe)} property ${Format.Expr(prop)} because they are not the same type (or a subtype).", + s"The ${Format.TypeRepr(valueTpe)} value ${Format.Expr(value)} cannot be assigned to the ${Format + .TypeRepr(propTpe)} property ${Format.Expr(prop)} because they are not the same type (or a subtype).", parent ) } @@ -98,7 +101,7 @@ object ParserHelpers { } // end CheckTypes def OrFail(expr: Expr[_])(using Quotes, History) = - unapply(expr).getOrElse { failParse(expr, classOf[Assignment]) } + unapply(expr).getOrElse(failParse(expr, classOf[Assignment])) def unapply(expr: Expr[_])(using Quotes, History): Option[Assignment] = UntypeExpr(expr) match { @@ -109,7 +112,7 @@ object ParserHelpers { object Double { def OrFail(expr: Expr[_])(using Quotes, History) = - unapply(expr).getOrElse { failParse(expr, classOf[AssignmentDual]) } + unapply(expr).getOrElse(failParse(expr, classOf[AssignmentDual])) def unapply(expr: Expr[_])(using Quotes, History): Option[AssignmentDual] = UntypeExpr(expr) match { case TwoComponents(ident1, identTpe1, ident2, identTpe2, prop, value) => @@ -125,7 +128,7 @@ object ParserHelpers { } } // end AssignmentTerm - } // end Assignments + } // end Assignments trait PropertyParser(implicit val qctx: Quotes) { import quotes.reflect.{Ident => TIdent, ValDef => TValDef, _} @@ -227,8 +230,9 @@ object ParserHelpers { } /** - * Helpers for different behaviors Quill supports of object equality. This is non-trivial since Quill has to make sense - * of different equality paradigms across ANSI-SQL and Scala for objects that may be Optional or not. Several + * Helpers for different behaviors Quill supports of object equality. This is + * non-trivial since Quill has to make sense of different equality paradigms + * across ANSI-SQL and Scala for objects that may be Optional or not. Several * techniques are implemented to resolve these inconsistencies. */ trait ComparisonTechniques { @@ -237,47 +241,59 @@ object ParserHelpers { def rootParse: Parser sealed trait EqualityBehavior { def operator: BinaryOperator } - case object Equal extends EqualityBehavior { def operator: BinaryOperator = EqualityOperator.`_==` } + case object Equal extends EqualityBehavior { def operator: BinaryOperator = EqualityOperator.`_==` } case object NotEqual extends EqualityBehavior { def operator: BinaryOperator = EqualityOperator.`_!=` } /** - * Taken from the identically named method in Parser.scala in Scala2-Quill. Much of this logic - * is not macro specific so a good deal of it can be refactored out into the quill-sql-portable module. - * Do equality checking on the database level with the same truth-table as idiomatic scala + * Taken from the identically named method in Parser.scala in Scala2-Quill.
+ * Much of this logic is not macro specific so a good deal of it can be + * refactored out into the quill-sql-portable module. Do equality checking + * on the database level with the same truth-table as idiomatic scala */ - def equalityWithInnerTypechecksIdiomatic(using Quotes, History)(left: quotes.reflect.Term, right: quotes.reflect.Term)(equalityBehavior: EqualityBehavior) = { + def equalityWithInnerTypechecksIdiomatic(using + Quotes, + History + )(left: quotes.reflect.Term, right: quotes.reflect.Term)(equalityBehavior: EqualityBehavior) = { import quotes.reflect.{Ident => TIdent, ValDef => TValDef, _} import io.getquill.ast.Implicits._ val (leftIsOptional, rightIsOptional) = checkInnerTypes(left, right, ForbidInnerCompare) - val a = rootParse(left.asExpr) - val b = rootParse(right.asExpr) - val comparison = BinaryOperation(a, equalityBehavior.operator, b) + val a = rootParse(left.asExpr) + val b = rootParse(right.asExpr) + val comparison = BinaryOperation(a, equalityBehavior.operator, b) (leftIsOptional, rightIsOptional, equalityBehavior) match { // == two optional things. Either they are both null or they are both defined and the same - case (true, true, Equal) => (OptionIsEmpty(a) +&&+ OptionIsEmpty(b)) +||+ (OptionIsDefined(a) +&&+ OptionIsDefined(b) +&&+ comparison) + case (true, true, Equal) => + (OptionIsEmpty(a) +&&+ OptionIsEmpty(b)) +||+ (OptionIsDefined(a) +&&+ OptionIsDefined(b) +&&+ comparison) // != two optional things. Either one is null and the other isn't. Or they are both defined and have different values - case (true, true, NotEqual) => (OptionIsDefined(a) +&&+ OptionIsEmpty(b)) +||+ (OptionIsEmpty(a) +&&+ OptionIsDefined(b)) +||+ comparison + case (true, true, NotEqual) => + (OptionIsDefined(a) +&&+ OptionIsEmpty(b)) +||+ (OptionIsEmpty(a) +&&+ OptionIsDefined(b)) +||+ comparison // No additional logic when both sides are defined case (false, false, _) => comparison // Comparing an optional object with a non-optional object is not allowed when using scala-idiomatic optional behavior case (lop, rop, _) => val lopString = (if (lop) "Optional" else "Non-Optional") + s" ${left}}" val ropString = (if (rop) "Optional" else "Non-Optional") + s" ${right}}" - report.throwError(s"Cannot compare ${lopString} with ${ropString} using operator ${equalityBehavior.operator}", left.asExpr) + report.throwError( + s"Cannot compare ${lopString} with ${ropString} using operator ${equalityBehavior.operator}", + left.asExpr + ) } } /** - * (not used yet but will be used when support for 'extras' dsl functionality is added) - * Do equality checking on the database level with the ansi-style truth table (i.e. always false if one side is null) + * (not used yet but will be used when support for 'extras' dsl + * functionality is added) Do equality checking on the database level with + * the ansi-style truth table (i.e. 
always false if one side is null) */ - def equalityWithInnerTypechecksAnsi(using Quotes, History)(left: quotes.reflect.Term, right: quotes.reflect.Term)(equalityBehavior: EqualityBehavior) = { + def equalityWithInnerTypechecksAnsi(using Quotes, History)(left: quotes.reflect.Term, right: quotes.reflect.Term)( + equalityBehavior: EqualityBehavior + ) = { import quotes.reflect.{Ident => TIdent, ValDef => TValDef, _} import io.getquill.ast.Implicits._ val (leftIsOptional, rightIsOptional) = checkInnerTypes(left, right, AllowInnerCompare) - val a = rootParse(left.asExpr) - val b = rootParse(right.asExpr) - val comparison = BinaryOperation(a, equalityBehavior.operator, b) + val a = rootParse(left.asExpr) + val b = rootParse(right.asExpr) + val comparison = BinaryOperation(a, equalityBehavior.operator, b) (leftIsOptional, rightIsOptional) match { case (true, true) => OptionIsDefined(a) +&&+ OptionIsDefined(b) +&&+ comparison case (true, false) => OptionIsDefined(a) +&&+ comparison @@ -295,40 +311,54 @@ object ParserHelpers { case object ForbidInnerCompare extends OptionCheckBehavior /** - * Type-check two trees, if one of them has optionals, go into the optionals to find the root types - * in each of them. Then compare the types that are inside. If they are not compareable, abort the build. - * Otherwise return type of which side (or both) has the optional. In order to do the actual comparison, - * the 'weak conformance' operator is used and a subclass is allowed on either side of the `==`. Weak - * conformance is necessary so that Longs can be compared to Ints etc... + * Type-check two trees, if one of them has optionals, go into the optionals + * to find the root types in each of them. Then compare the types that are + * inside. If they are not compareable, abort the build. Otherwise return + * type of which side (or both) has the optional. In order to do the actual + * comparison, the 'weak conformance' operator is used and a subclass is + * allowed on either side of the `==`. Weak conformance is necessary so that + * Longs can be compared to Ints etc... */ - def checkInnerTypes(using Quotes)(lhs: quotes.reflect.Term, rhs: quotes.reflect.Term, optionCheckBehavior: OptionCheckBehavior): (Boolean, Boolean) = { + def checkInnerTypes(using Quotes)( + lhs: quotes.reflect.Term, + rhs: quotes.reflect.Term, + optionCheckBehavior: OptionCheckBehavior + ): (Boolean, Boolean) = { import quotes.reflect.{Ident => TIdent, ValDef => TValDef, _} - val leftType = lhs.tpe + val leftType = lhs.tpe val rightType = rhs.tpe // Note that this only goes inside the optional one level i.e. Option[T] => T. If we have Option[Option[T]] it will return the inside Option[T]. // This is by design. If the types do not match, even if we normally don't care about the outer layer // (i.e. for equalityWithInnerTypechecksAnsi where Option[T] == T is allowed (that's in the 'extras' modules which uses ===)) // we still want to fail with an exception that the types are identical if the user does Option[Option[T]] == Option[T] since that is a serious // typing error. 
- val leftInner = innerOptionParam(leftType) + val leftInner = innerOptionParam(leftType) val rightInner = innerOptionParam(rightType) - val leftIsOptional = isOptionType(leftType) && !(leftType.widen =:= TypeRepr.of[Nothing]) && !(leftType.widen =:= TypeRepr.of[Null]) - val rightIsOptional = isOptionType(rightType) && !(rightType.widen =:= TypeRepr.of[Nothing]) && !(rightType.widen =:= TypeRepr.of[Null]) + val leftIsOptional = + isOptionType(leftType) && !(leftType.widen =:= TypeRepr.of[Nothing]) && !(leftType.widen =:= TypeRepr.of[Null]) + val rightIsOptional = + isOptionType(rightType) && !(rightType.widen =:= TypeRepr.of[Nothing]) && !(rightType.widen =:= TypeRepr + .of[Null]) val typesMatch = wideMatchTypes(rightInner, leftInner) optionCheckBehavior match { case AllowInnerCompare if typesMatch => (leftIsOptional, rightIsOptional) - case ForbidInnerCompare if ((leftIsOptional && rightIsOptional) || (!leftIsOptional && !rightIsOptional)) && typesMatch => + case ForbidInnerCompare + if ((leftIsOptional && rightIsOptional) || (!leftIsOptional && !rightIsOptional)) && typesMatch => (leftIsOptional, rightIsOptional) case _ => if (leftIsOptional || rightIsOptional) report.throwError( - s"${Format.TypeReprW(leftType)} == ${Format.TypeReprW(rightType)} is not allowed since ${Format.TypeReprW(leftInner)}, ${Format.TypeReprW(rightInner)} are different types.", + s"${Format.TypeReprW(leftType)} == ${Format.TypeReprW(rightType)} is not allowed since ${Format + .TypeReprW(leftInner)}, ${Format.TypeReprW(rightInner)} are different types.", lhs.asExpr ) else - report.throwError(s"${Format.TypeReprW(leftType)} == ${Format.TypeReprW(rightType)} is not allowed since they are different types.", lhs.asExpr) + report.throwError( + s"${Format.TypeReprW(leftType)} == ${Format.TypeReprW(rightType)} is not allowed since they are different types.", + lhs.asExpr + ) } } // end checkInnerTypes @@ -339,13 +369,18 @@ object ParserHelpers { } /** - * Match types in the most wide way possible. This function is not for generalized type equality since quill does not directly - * compare anything, rather it just translates things into SQL expressions. This kind of check is used in a general sense when things - * that it doesn't even make sense to compare are compared e.g. an Person and a String. In this case, we want to provide some kind - * of compile-time warning that the comparision the user is attempting to do in SQL is non sensical in the first place. Therefore when - * there is any kind of possibility that the expression makes sense (e.g. by comparing a Dog to a Animal (i.e. class to subclass), by comparing - * two numeric types of any kind etc... we allow the comparison to happen). - * For int/long/float/double comparisons don't crash on compile-time typing can re-evaluate this upon user feedback + * Match types in the most wide way possible. This function is not for + * generalized type equality since quill does not directly compare anything, + * rather it just translates things into SQL expressions. This kind of check + * is used in a general sense when things that it doesn't even make sense to + * compare are compared e.g. a Person and a String. In this case, we want + * to provide some kind of compile-time warning that the comparison the + * user is attempting to do in SQL is nonsensical in the first place. + * Therefore when there is any kind of possibility that the expression makes + * sense (e.g. by comparing a Dog to an Animal (i.e.
class to subclass), by + comparing two numeric types of any kind etc... we allow the comparison to + happen). For int/long/float/double comparisons we don't crash on + compile-time typing; we can re-evaluate this upon user feedback */ def wideMatchTypes(using Quotes)(a: quotes.reflect.TypeRepr, b: quotes.reflect.TypeRepr) = a.widen =:= b.widen || a.widen <:< b.widen || b.widen <:< a.widen || (isNumeric(a.widen) && isNumeric(b.widen)) @@ -389,7 +424,11 @@ object ParserHelpers { val body = rhsOpt match { // TODO Better site-description in error - case None => report.throwError(s"Cannot parse 'val' clause with no '= rhs' (i.e. equals and right hand side) of ${Printer.TreeStructure.show(tree)}") + case None => + report.throwError( + s"Cannot parse 'val' clause with no '= rhs' (i.e. equals and right hand side) of ${Printer.TreeStructure + .show(tree)}" + ) case Some(rhs) => rhs } val bodyAst = rootParse(body.asExpr) @@ -399,7 +438,11 @@ object ParserHelpers { val body = rhsOpt match { // TODO Better site-description in error - case None => report.throwError(s"Cannot parse 'val' clause with no '= rhs' (i.e. equals and right hand side) of ${Printer.TreeStructure.show(tree)}") + case None => + report.throwError( + s"Cannot parse 'val' clause with no '= rhs' (i.e. equals and right hand side) of ${Printer.TreeStructure + .show(tree)}" + ) case Some(rhs) => rhs } val bodyAst = rootParse(body.asExpr) @@ -414,7 +457,7 @@ object ParserHelpers { enum PatMatch { // Represents a variable assignment pattern match i.e. single clause with no guards e.g. // ptups.map { case (name, age) => ... } where ptups := people.map(p => (p.name, p.age)) - case SimpleClause(body: Ast) extends PatMatch + case SimpleClause(body: Ast) extends PatMatch case MultiClause(clauses: List[PatMatchClause]) extends PatMatch // In some cases, scala compiler adds a trivial boolean clause to a tuple pattern match // we detect these and can just spliced TRUE or 1=1 in those cases @@ -448,11 +491,11 @@ object ParserHelpers { case m @ Match(expr, caseDefs) => println(s"Doing Multi-Clause Pat-match: ${Format(Printer.TreeStructure.show(m))}") val clauses = - caseDefs.map { - case CaseDef(fields, guard, body) => - val bodyAst = betaReduceTupleFields(expr, fields, Some(root))(body) - val guardAst = guard.map(betaReduceTupleFields(expr, fields)(_)).getOrElse(ast.Constant(true, Quat.BooleanValue)) - PatMatchClause(bodyAst, guardAst) + caseDefs.map { case CaseDef(fields, guard, body) => + val bodyAst = betaReduceTupleFields(expr, fields, Some(root))(body) + val guardAst = + guard.map(betaReduceTupleFields(expr, fields)(_)).getOrElse(ast.Constant(true, Quat.BooleanValue)) + PatMatchClause(bodyAst, guardAst) } Some(PatMatch.MultiClause(clauses)) @@ -462,17 +505,21 @@ object ParserHelpers { } /** - * Beta-reduces out tuple members that have been pattern matched to their corresponding components - * For example: - * given: ptups := people.map(p => (p.name, p.age)) - * ptups.map { case (name, age) => fun(name, age) } - * becomes reduced to: - * ptups.map { x => fun(x.name, x.age) } + * Beta-reduces out tuple members that have been pattern matched to their + * corresponding components. For example: given: ptups := people.map(p => + * (p.name, p.age)) ptups.map { case (name, age) => fun(name, age) } becomes + * reduced to: ptups.map { x => fun(x.name, x.age) } */ - protected def betaReduceTupleFields(using Quotes, History, TranspileConfig)(tupleTree: quotes.reflect.Term, fieldsTree: quotes.reflect.Tree, messageExpr: Option[quotes.reflect.Term] =
None)(bodyTree: quotes.reflect.Term): Ast = { + protected def betaReduceTupleFields(using + Quotes, + History, + TranspileConfig + )(tupleTree: quotes.reflect.Term, fieldsTree: quotes.reflect.Tree, messageExpr: Option[quotes.reflect.Term] = None)( + bodyTree: quotes.reflect.Term + ): Ast = { import quotes.reflect.{Ident => TIdent, ValDef => TValDef, _} // TODO Need to verify that this is actually a tuple? - val tuple = rootParse(tupleTree.asExpr) + val tuple = rootParse(tupleTree.asExpr) val bodyRaw = rootParse(bodyTree.asExpr) // In some cases the body expression itself is so complex it needs to be beta-reduced before we start // beta reducing the pat match tuples otherwise some issues can happen. This was discovered in the DepartmentsSpec tests @@ -483,7 +530,7 @@ object ParserHelpers { foo match { case ((a,b),c) => bar } would yield something like: List((a,List(_1, _1)), (b,List(_1, _2)), (c,List(_2))) */ - def tupleBindsPath(field: Tree, path: List[String] = List()): List[(AIdent, List[String])] = { + def tupleBindsPath(field: Tree, path: List[String] = List()): List[(AIdent, List[String])] = UntypeTree(field) match { case Bind(name, TIdent(_)) => List(AIdent(name) -> path) case Unapply(Method0(TupleIdent(), "unapply"), something, binds) => @@ -500,12 +547,13 @@ object ParserHelpers { case Some(expr) => s" in the expression: ${Format.Tree(expr)}" case None => "" } - report.throwError(s"Invalid Pattern Matching Term: ${Format.Tree(other)}${addition}.\n" + - s"Quill Query Pattern matches must be correctly matching tuples.\n" + - s"For example for query[Person].map(p => (p.name, p.age)) you can then do:\n" + - s"query[Person].map(p => (p.name, p.age)).map { case (name, age) => ... }") + report.throwError( + s"Invalid Pattern Matching Term: ${Format.Tree(other)}${addition}.\n" + + s"Quill Query Pattern matches must correctly match tuples.\n" + + s"For example for query[Person].map(p => (p.name, p.age)) you can then do:\n" + + s"query[Person].map(p => (p.name, p.age)).map { case (name, age) => ... }" + ) } - } /* Take the list found in the tupleBindsPath method above and match up each match-tuple element from the original tuple we found. For example, if we had: foo match { case ((a,b),c) => bar } and the tuple that we match on (i.e. 'foo' in this example) is ((f,b),z) then we want to get: List(((f,b),z)._1._1, ((f,b),z)._1._2, ((f,b),z)._2) */ def propertyAt(path: List[String]) = - path.foldLeft(tuple) { - case (tup, elem) => Property(tup, elem) + path.foldLeft(tuple) { case (tup, elem) => + Property(tup, elem) } - val fieldPaths = tupleBindsPath(fieldsTree) + val fieldPaths = tupleBindsPath(fieldsTree) val reductionTuples = fieldPaths.map((id, path) => (id, propertyAt(path))) val interp = new Interpolator(TraceType.Standard, summon[TranspileConfig].traceConfig, 1) @@ -548,7 +596,8 @@ object ParserHelpers { expr match { // Putting a type-apply in all possible places to detect all possible variations of the // implicit class pattern that we need to warn about for Scala 3 (since it no-longer works).
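As a reference point for the path computation above: per the comment in tupleBindsPath, a match like foo match { case ((a, b), c) => bar } should yield the bind-to-path pairs below (a small sketch in plain Scala, outside the macro):

object TupleBindPathExample {
  // each bound name maps to the accessor path needed to reach it inside the tuple
  val paths: List[(String, List[String])] =
    List(
      "a" -> List("_1", "_1"),
      "b" -> List("_1", "_2"),
      "c" -> List("_2")
    )
  // propertyAt then folds each path into nested Property(...) nodes, so if the
  // matched tuple is ((f, b), z), "a" resolves to ((f, b), z)._1._1 in the reduced body.
}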
- case expr @ Unseal(UntypeApply(cc @ Apply(UntypeApply(ccid), List(constructorArg)))) if (isImplicitClassMaker(cc) && isImplicitClassMethod(ccid)) => + case expr @ Unseal(UntypeApply(cc @ Apply(UntypeApply(ccid), List(constructorArg)))) + if (isImplicitClassMaker(cc) && isImplicitClassMethod(ccid)) => Some((ccid.tpe, constructorArg)) case _ => None diff --git a/quill-sql/src/main/scala/io/getquill/parser/SerialHelper.scala b/quill-sql/src/main/scala/io/getquill/parser/SerialHelper.scala index 7e53dd054..248365664 100644 --- a/quill-sql/src/main/scala/io/getquill/parser/SerialHelper.scala +++ b/quill-sql/src/main/scala/io/getquill/parser/SerialHelper.scala @@ -19,7 +19,7 @@ import io.getquill.util.CommonExtensions.Throwable._ object SerialHelper { import io.getquill.quat.{Quat => QQuat} object Ast { - def fromSerialized(serial: String): Ast = BooSerializer.Ast.deserialize(serial) + def fromSerialized(serial: String): Ast = BooSerializer.Ast.deserialize(serial) def toSerialized(value: ast.Ast): String = BooSerializer.Ast.serialize(value) class Expr[T] { def apply(value: ast.Ast)(using Quotes, TType[T]) = { @@ -41,7 +41,7 @@ object SerialHelper { object Quat { def fromSerialized(serial: String): QQuat = BooSerializer.Quat.deserialize(serial) - def toSerialized(quat: QQuat): String = BooSerializer.Quat.serialize(quat) + def toSerialized(quat: QQuat): String = BooSerializer.Quat.serialize(quat) class Expr[T] { def apply(value: QQuat)(using Quotes, TType[T]) = { import quotes.reflect._ @@ -62,7 +62,7 @@ object SerialHelper { object QuatProduct { def fromSerialized(serial: String): QQuat.Product = BooSerializer.QuatProduct.deserialize(serial) - def toSerialized(quat: QQuat.Product): String = BooSerializer.QuatProduct.serialize(quat) + def toSerialized(quat: QQuat.Product): String = BooSerializer.QuatProduct.serialize(quat) class Expr[T] { def apply(value: QQuat.Product)(using Quotes, TType[T]) = { import quotes.reflect._ diff --git a/quill-sql/src/main/scala/io/getquill/parser/SerializationBehavior.scala b/quill-sql/src/main/scala/io/getquill/parser/SerializationBehavior.scala index 632febc59..2b22011e1 100644 --- a/quill-sql/src/main/scala/io/getquill/parser/SerializationBehavior.scala +++ b/quill-sql/src/main/scala/io/getquill/parser/SerializationBehavior.scala @@ -3,9 +3,9 @@ package io.getquill.parser sealed trait SerializationBehavior object SerializationBehavior { sealed trait SkipSerialize extends SerializationBehavior - case object SkipSerialize extends SkipSerialize - sealed trait Default extends SerializationBehavior - case object Default extends Default + case object SkipSerialize extends SkipSerialize + sealed trait Default extends SerializationBehavior + case object Default extends Default } trait DoSerialize { diff --git a/quill-sql/src/main/scala/io/getquill/parser/Serialize.scala b/quill-sql/src/main/scala/io/getquill/parser/Serialize.scala index 09d9900fe..081565101 100644 --- a/quill-sql/src/main/scala/io/getquill/parser/Serialize.scala +++ b/quill-sql/src/main/scala/io/getquill/parser/Serialize.scala @@ -27,8 +27,11 @@ object SerializeQuat { import quotes.reflect._ expr.asTerm.underlyingArgument.asExprOf[SerializeQuat] match { case Expr(serializeQuat) => serializeQuat - case other => report.throwError( - s"""|Found an implicit instrument to Serialize Quats but could not read it from expression: ${Format.Expr(other)}. + case other => + report.throwError( + s"""|Found an implicit instrument to Serialize Quats but could not read it from expression: ${Format.Expr( + other + )}. 
|Make sure that the SerializeQuat implicit is defined as an inline-given (or implicit inline def) for example: |inline given SerializeQuat = SerializeQuat.All |val q = quote { myQuery } // will use the above given @@ -70,8 +73,11 @@ object SerializeAst { import quotes.reflect._ expr.asTerm.underlyingArgument.asExprOf[SerializeAst] match { case Expr(serializeAst) => serializeAst - case other => report.throwError( - s"""|Found an implicit instrument to Serialize Asts but could not read it from expression: ${Format.Expr(other)}. + case other => + report.throwError( + s"""|Found an implicit instrument to Serialize Asts but could not read it from expression: ${Format.Expr( + other + )}. |Make sure that the SerializeAst implicit is defined as an inline-given (or implicit inline def) for example: |inline given SerializeAst = SerializeAst.All |val q = quote { myQuery } // will use the above given diff --git a/quill-sql/src/main/scala/io/getquill/parser/Unlifter.scala b/quill-sql/src/main/scala/io/getquill/parser/Unlifter.scala index 0c0046a75..78014a230 100644 --- a/quill-sql/src/main/scala/io/getquill/parser/Unlifter.scala +++ b/quill-sql/src/main/scala/io/getquill/parser/Unlifter.scala @@ -25,7 +25,10 @@ object Unlifter { // println(s"==================== Unlifting: ${Format.Expr(ast)} =================}") ast match { case Is[Ast](ast) => unliftAst.attempt(ast) - case other => report.throwError(s"Cannot unlift ${Format.Expr(ast)} to a ast because it's type is: ${Format.TypeRepr(ast.asTerm.tpe)}") + case other => + report.throwError( + s"Cannot unlift ${Format.Expr(ast)} to an Ast because its type is: ${Format.TypeRepr(ast.asTerm.tpe)}" + ) } } } @@ -36,12 +39,16 @@ object Unlifter { // println(s"==================== Unlifting: ${Format.Expr(ast)} =================}") ast match { case Is[CaseClass](ast) => unliftCaseClass.attempt(ast) - case other => report.throwError(s"Cannot unlift ${Format.Expr(ast)} to a CaseClass because it's type is: ${Format.TypeRepr(ast.asTerm.tpe)}") + case other => + report.throwError( + s"Cannot unlift ${Format.Expr(ast)} to a CaseClass because its type is: ${Format.TypeRepr(ast.asTerm.tpe)}" + ) } } } - def apply(ast: Expr[Ast]): Quotes ?=> Ast = unliftAst.apply(ast) // can also do ast.lift but this makes some error messages simpler + def apply(ast: Expr[Ast]): Quotes ?=> Ast = + unliftAst.apply(ast) // can also do ast.lift but this makes some error messages simpler extension [T](t: Expr[T])(using FromExpr[T], Quotes) { def unexpr: T = t.valueOrError } @@ -66,21 +73,21 @@ object Unlifter { def apply(expr: Expr[T])(using Quotes): T = { import quotes.reflect._ - attempt(expr) - .getOrElse { - report.throwError( - s"Could not Unlift AST type ${classTag[T].runtimeClass.getSimpleName} from the element:\n" + - s"${section(Format.Expr.Detail(expr))}\n" + - s"of the Quill Abstract Syntax Tree", - expr - ) - } + attempt(expr).getOrElse { + report.throwError( + s"Could not Unlift AST type ${classTag[T].runtimeClass.getSimpleName} from the element:\n" + + s"${section(Format.Expr.Detail(expr))}\n" + + s"of the Quill Abstract Syntax Tree", + expr + ) + } } /** - * For things that contain subclasses, don't strictly check the super type and fail the match - * if the type fails since we can't do that when it could be a sub-type of the type being matched. - * The only thing we can do in that case is go through the sub-parser and see if anything matches.
+ * For things that contain subclasses, don't strictly check the super type + * and fail the match if the type fails since we can't do that when it could + * be a sub-type of the type being matched. The only thing we can do in that + * case is go through the sub-parser and see if anything matches. */ def unapply(expr: Expr[T])(using Quotes): Option[T] = Some(apply(expr)) // TODO Maybe want to go to stricter version of this going back to Some(apply(expr)). See comment below about quoted matching being covariant. @@ -143,20 +150,20 @@ object Unlifter { } given unliftAssignment: NiceUnliftable[Assignment] with { - def unlift = { - case '{ Assignment($alias, $property, $value) } => Assignment(alias.unexpr, property.unexpr, value.unexpr) + def unlift = { case '{ Assignment($alias, $property, $value) } => + Assignment(alias.unexpr, property.unexpr, value.unexpr) } } given unliftAssignmentDual: NiceUnliftable[AssignmentDual] with { - def unlift = { - case '{ AssignmentDual($alias1, $alias2, $property, $value) } => AssignmentDual(alias1.unexpr, alias2.unexpr, property.unexpr, value.unexpr) + def unlift = { case '{ AssignmentDual($alias1, $alias2, $property, $value) } => + AssignmentDual(alias1.unexpr, alias2.unexpr, property.unexpr, value.unexpr) } } given unliftPropertyAlias: NiceUnliftable[PropertyAlias] with { - def unlift = { - case '{ PropertyAlias($paths, $b) } => PropertyAlias(paths.unexpr, b.unexpr) + def unlift = { case '{ PropertyAlias($paths, $b) } => + PropertyAlias(paths.unexpr, b.unexpr) } } @@ -177,23 +184,26 @@ object Unlifter { // OptionNone.quat which would otherwise happen if quat.unexper would be passed directly. val unliftedQuat = quat.unexpr OptionNone(unliftedQuat) - case Is[OptionIsEmpty]('{ OptionIsEmpty.apply($a) }) => OptionIsEmpty(a.unexpr) - case Is[OptionNonEmpty]('{ OptionNonEmpty.apply($a) }) => OptionNonEmpty(a.unexpr) - case Is[OptionIsDefined]('{ OptionIsDefined.apply($a) }) => OptionIsDefined(a.unexpr) - case Is[OptionGetOrElse]('{ OptionGetOrElse.apply($a, $b) }) => OptionGetOrElse(a.unexpr, b.unexpr) - case Is[OptionGetOrNull]('{ OptionGetOrNull.apply($a) }) => OptionGetOrNull(a.unexpr) - case Is[OptionOrNull]('{ OptionOrNull.apply($a) }) => OptionOrNull(a.unexpr) - case Is[FilterIfDefined]('{ FilterIfDefined.apply($a, $b, $c) }) => FilterIfDefined(a.unexpr, b.unexpr, c.unexpr) - case Is[OptionContains]('{ OptionContains.apply($a, $b) }) => OptionContains(a.unexpr, b.unexpr) - case Is[OptionMap]('{ OptionMap.apply($a, $b, $c) }) => OptionMap(a.unexpr, b.unexpr, c.unexpr) - case Is[OptionFlatMap]('{ OptionFlatMap.apply($a, $b, $c) }) => OptionFlatMap(a.unexpr, b.unexpr, c.unexpr) - case Is[OptionFlatten]('{ OptionFlatten.apply($a) }) => OptionFlatten(a.unexpr) - case Is[OptionTableMap]('{ OptionTableMap.apply($a, $b, $c) }) => OptionTableMap(a.unexpr, b.unexpr, c.unexpr) - case Is[OptionTableFlatMap]('{ OptionTableFlatMap.apply($a, $b, $c) }) => OptionTableFlatMap(a.unexpr, b.unexpr, c.unexpr) - case Is[OptionExists]('{ OptionExists.apply($a, $b, $c) }) => OptionExists(a.unexpr, b.unexpr, c.unexpr) - case Is[OptionForall]('{ OptionForall.apply($a, $b, $c) }) => OptionForall(a.unexpr, b.unexpr, c.unexpr) - case Is[OptionTableExists]('{ OptionTableExists.apply($a, $b, $c) }) => OptionTableExists(a.unexpr, b.unexpr, c.unexpr) - case Is[OptionTableForall]('{ OptionTableForall.apply($a, $b, $c) }) => OptionTableForall(a.unexpr, b.unexpr, c.unexpr) + case Is[OptionIsEmpty]('{ OptionIsEmpty.apply($a) }) => OptionIsEmpty(a.unexpr) + case Is[OptionNonEmpty]('{ 
OptionNonEmpty.apply($a) }) => OptionNonEmpty(a.unexpr) + case Is[OptionIsDefined]('{ OptionIsDefined.apply($a) }) => OptionIsDefined(a.unexpr) + case Is[OptionGetOrElse]('{ OptionGetOrElse.apply($a, $b) }) => OptionGetOrElse(a.unexpr, b.unexpr) + case Is[OptionGetOrNull]('{ OptionGetOrNull.apply($a) }) => OptionGetOrNull(a.unexpr) + case Is[OptionOrNull]('{ OptionOrNull.apply($a) }) => OptionOrNull(a.unexpr) + case Is[FilterIfDefined]('{ FilterIfDefined.apply($a, $b, $c) }) => FilterIfDefined(a.unexpr, b.unexpr, c.unexpr) + case Is[OptionContains]('{ OptionContains.apply($a, $b) }) => OptionContains(a.unexpr, b.unexpr) + case Is[OptionMap]('{ OptionMap.apply($a, $b, $c) }) => OptionMap(a.unexpr, b.unexpr, c.unexpr) + case Is[OptionFlatMap]('{ OptionFlatMap.apply($a, $b, $c) }) => OptionFlatMap(a.unexpr, b.unexpr, c.unexpr) + case Is[OptionFlatten]('{ OptionFlatten.apply($a) }) => OptionFlatten(a.unexpr) + case Is[OptionTableMap]('{ OptionTableMap.apply($a, $b, $c) }) => OptionTableMap(a.unexpr, b.unexpr, c.unexpr) + case Is[OptionTableFlatMap]('{ OptionTableFlatMap.apply($a, $b, $c) }) => + OptionTableFlatMap(a.unexpr, b.unexpr, c.unexpr) + case Is[OptionExists]('{ OptionExists.apply($a, $b, $c) }) => OptionExists(a.unexpr, b.unexpr, c.unexpr) + case Is[OptionForall]('{ OptionForall.apply($a, $b, $c) }) => OptionForall(a.unexpr, b.unexpr, c.unexpr) + case Is[OptionTableExists]('{ OptionTableExists.apply($a, $b, $c) }) => + OptionTableExists(a.unexpr, b.unexpr, c.unexpr) + case Is[OptionTableForall]('{ OptionTableForall.apply($a, $b, $c) }) => + OptionTableForall(a.unexpr, b.unexpr, c.unexpr) } } @@ -254,13 +264,16 @@ object Unlifter { given unliftAction: NiceUnliftable[Action] with { def unlift = { - case Is[Update]('{ Update($query, $assignments) }) => Update(query.unexpr, assignments.unexpr) - case Is[Insert]('{ Insert($query, $assignments) }) => Insert(query.unexpr, assignments.unexpr) - case Is[Delete]('{ Delete($query) }) => Delete(query.unexpr) - case Is[Returning]('{ Returning(${ action }, ${ alias }, ${ body }: Ast) }) => Returning(action.unexpr, alias.unexpr, body.unexpr) - case Is[ReturningGenerated]('{ ReturningGenerated(${ action }, ${ alias }, ${ body }: Ast) }) => ReturningGenerated(action.unexpr, alias.unexpr, body.unexpr) - case Is[Foreach]('{ Foreach(${ query }, ${ alias }, ${ body }: Ast) }) => Foreach(query.unexpr, alias.unexpr, body.unexpr) - case Is[OnConflict]('{ OnConflict($a, $b, $c) }) => OnConflict(a.unexpr, b.unexpr, c.unexpr) + case Is[Update]('{ Update($query, $assignments) }) => Update(query.unexpr, assignments.unexpr) + case Is[Insert]('{ Insert($query, $assignments) }) => Insert(query.unexpr, assignments.unexpr) + case Is[Delete]('{ Delete($query) }) => Delete(query.unexpr) + case Is[Returning]('{ Returning(${ action }, ${ alias }, ${ body }: Ast) }) => + Returning(action.unexpr, alias.unexpr, body.unexpr) + case Is[ReturningGenerated]('{ ReturningGenerated(${ action }, ${ alias }, ${ body }: Ast) }) => + ReturningGenerated(action.unexpr, alias.unexpr, body.unexpr) + case Is[Foreach]('{ Foreach(${ query }, ${ alias }, ${ body }: Ast) }) => + Foreach(query.unexpr, alias.unexpr, body.unexpr) + case Is[OnConflict]('{ OnConflict($a, $b, $c) }) => OnConflict(a.unexpr, b.unexpr, c.unexpr) } } @@ -280,25 +293,33 @@ object Unlifter { given unliftQuery: NiceUnliftable[AQuery] with { def unlift = { - case Is[Entity](ent) => unliftEntity(ent) - case Is[Map]('{ Map(${ query }, ${ alias }, ${ body }: Ast) }) => Map(query.unexpr, alias.unexpr, body.unexpr) - case 
Is[FlatMap]('{ FlatMap(${ query }, ${ alias }, ${ body }: Ast) }) => FlatMap(query.unexpr, alias.unexpr, body.unexpr) - case Is[Filter]('{ Filter(${ query }, ${ alias }, ${ body }: Ast) }) => Filter(query.unexpr, alias.unexpr, body.unexpr) - case Is[GroupBy]('{ GroupBy(${ query }, ${ alias }, ${ body }: Ast) }) => GroupBy(query.unexpr, alias.unexpr, body.unexpr) + case Is[Entity](ent) => unliftEntity(ent) + case Is[Map]('{ Map(${ query }, ${ alias }, ${ body }: Ast) }) => Map(query.unexpr, alias.unexpr, body.unexpr) + case Is[FlatMap]('{ FlatMap(${ query }, ${ alias }, ${ body }: Ast) }) => + FlatMap(query.unexpr, alias.unexpr, body.unexpr) + case Is[Filter]('{ Filter(${ query }, ${ alias }, ${ body }: Ast) }) => + Filter(query.unexpr, alias.unexpr, body.unexpr) + case Is[GroupBy]('{ GroupBy(${ query }, ${ alias }, ${ body }: Ast) }) => + GroupBy(query.unexpr, alias.unexpr, body.unexpr) case Is[GroupByMap]('{ GroupByMap(${ query }, ${ byAlias }, ${ byBody }, ${ mapAlias }, ${ mapBody }) }) => GroupByMap(query.unexpr, byAlias.unexpr, byBody.unexpr, mapAlias.unexpr, mapBody.unexpr) - case Is[SortBy]('{ SortBy(${ query }, ${ alias }, ${ criterias }, ${ ordering }) }) => SortBy(query.unexpr, alias.unexpr, criterias.unexpr, ordering.unexpr) - case Is[Distinct]('{ Distinct(${ a }) }) => Distinct(a.unexpr) - case Is[DistinctOn]('{ DistinctOn(${ query }, ${ alias }, $body) }) => DistinctOn(query.unexpr, alias.unexpr, body.unexpr) - case Is[Nested]('{ Nested(${ a }) }) => Nested(a.unexpr) - case Is[Union]('{ Union($a, $b) }) => Union(a.unexpr, b.unexpr) - case Is[UnionAll]('{ UnionAll($a, $b) }) => UnionAll(a.unexpr, b.unexpr) - case Is[Join]('{ Join($typ, $a, $b, $aliasA, $aliasB, $on) }) => Join(typ.unexpr, a.unexpr, b.unexpr, aliasA.unexpr, aliasB.unexpr, on.unexpr) - case Is[FlatJoin]('{ FlatJoin($typ, $a, $aliasA, $on) }) => FlatJoin(typ.unexpr, a.unexpr, aliasA.unexpr, on.unexpr) - case Is[Take]('{ Take($query, $num) }) => Take(query.unexpr, num.unexpr) - case Is[Drop]('{ Drop($query, $num) }) => Drop(query.unexpr, num.unexpr) - case Is[ConcatMap]('{ ConcatMap(${ query }, ${ alias }, ${ body }: Ast) }) => ConcatMap(query.unexpr, alias.unexpr, body.unexpr) + case Is[SortBy]('{ SortBy(${ query }, ${ alias }, ${ criterias }, ${ ordering }) }) => + SortBy(query.unexpr, alias.unexpr, criterias.unexpr, ordering.unexpr) + case Is[Distinct]('{ Distinct(${ a }) }) => Distinct(a.unexpr) + case Is[DistinctOn]('{ DistinctOn(${ query }, ${ alias }, $body) }) => + DistinctOn(query.unexpr, alias.unexpr, body.unexpr) + case Is[Nested]('{ Nested(${ a }) }) => Nested(a.unexpr) + case Is[Union]('{ Union($a, $b) }) => Union(a.unexpr, b.unexpr) + case Is[UnionAll]('{ UnionAll($a, $b) }) => UnionAll(a.unexpr, b.unexpr) + case Is[Join]('{ Join($typ, $a, $b, $aliasA, $aliasB, $on) }) => + Join(typ.unexpr, a.unexpr, b.unexpr, aliasA.unexpr, aliasB.unexpr, on.unexpr) + case Is[FlatJoin]('{ FlatJoin($typ, $a, $aliasA, $on) }) => + FlatJoin(typ.unexpr, a.unexpr, aliasA.unexpr, on.unexpr) + case Is[Take]('{ Take($query, $num) }) => Take(query.unexpr, num.unexpr) + case Is[Drop]('{ Drop($query, $num) }) => Drop(query.unexpr, num.unexpr) + case Is[ConcatMap]('{ ConcatMap(${ query }, ${ alias }, ${ body }: Ast) }) => + ConcatMap(query.unexpr, alias.unexpr, body.unexpr) // Note: Aggregation is actually a Query-Type. 
Not sure why in Scala2-Quill it's not in the query-unlifter case Is[Aggregation]('{ Aggregation(${ operator }, ${ query }) }) => Aggregation(operator.unexpr, query.unexpr) } @@ -319,25 +340,28 @@ object Unlifter { given unliftAst: NiceUnliftable[Ast] with { def unlift = { - case Is[AQuery](q) => unliftQuery(q) - case Is[Constant](c) => unliftConstant(c) - case Is[Action](a) => unliftAction(a) - case Is[Entity](p) => unliftEntity(p) - case Is[Property](p) => unliftProperty(p) - case Is[AIdent](i) => unliftIdent(i) - case Is[Ordering](o) => unliftOrdering(o) - case Is[If]('{ If($cond, $thenStmt, $elseStmt) }) => If(cond.unexpr, thenStmt.unexpr, elseStmt.unexpr) - case Is[Function]('{ Function($params, $body) }) => Function(params.unexpr, body.unexpr) - case Is[FunctionApply]('{ FunctionApply($function, $values) }) => FunctionApply(function.unexpr, values.unexpr) - case Is[UnaryOperation]('{ UnaryOperation(${ operator }, ${ a }: Ast) }) => UnaryOperation(unliftOperator(operator).asInstanceOf[UnaryOperator], a.unexpr) - case Is[BinaryOperation]('{ BinaryOperation(${ a }, ${ operator }, ${ b }: Ast) }) => BinaryOperation(a.unexpr, unliftOperator(operator).asInstanceOf[BinaryOperator], b.unexpr) - case Is[Property]('{ Property(${ ast }, ${ name }) }) => Property(ast.unexpr, constString(name)) - case Is[ScalarTag]('{ ScalarTag(${ uid }, ${ source }) }) => ScalarTag(constString(uid), source.unexpr) - case Is[QuotationTag]('{ QuotationTag($uid) }) => QuotationTag(constString(uid)) - case Is[Infix]('{ Infix($parts, $params, $pure, $transparent, $quat) }) => Infix(parts.unexpr, params.unexpr, pure.unexpr, transparent.unexpr, quat.unexpr) - case Is[Tuple]('{ Tuple.apply($values) }) => Tuple(values.unexpr) - case Is[CaseClass](ast) => unliftCaseClass(ast) - case Is[IterableOperation](unliftTraversableOperation(o)) => o + case Is[AQuery](q) => unliftQuery(q) + case Is[Constant](c) => unliftConstant(c) + case Is[Action](a) => unliftAction(a) + case Is[Entity](p) => unliftEntity(p) + case Is[Property](p) => unliftProperty(p) + case Is[AIdent](i) => unliftIdent(i) + case Is[Ordering](o) => unliftOrdering(o) + case Is[If]('{ If($cond, $thenStmt, $elseStmt) }) => If(cond.unexpr, thenStmt.unexpr, elseStmt.unexpr) + case Is[Function]('{ Function($params, $body) }) => Function(params.unexpr, body.unexpr) + case Is[FunctionApply]('{ FunctionApply($function, $values) }) => FunctionApply(function.unexpr, values.unexpr) + case Is[UnaryOperation]('{ UnaryOperation(${ operator }, ${ a }: Ast) }) => + UnaryOperation(unliftOperator(operator).asInstanceOf[UnaryOperator], a.unexpr) + case Is[BinaryOperation]('{ BinaryOperation(${ a }, ${ operator }, ${ b }: Ast) }) => + BinaryOperation(a.unexpr, unliftOperator(operator).asInstanceOf[BinaryOperator], b.unexpr) + case Is[Property]('{ Property(${ ast }, ${ name }) }) => Property(ast.unexpr, constString(name)) + case Is[ScalarTag]('{ ScalarTag(${ uid }, ${ source }) }) => ScalarTag(constString(uid), source.unexpr) + case Is[QuotationTag]('{ QuotationTag($uid) }) => QuotationTag(constString(uid)) + case Is[Infix]('{ Infix($parts, $params, $pure, $transparent, $quat) }) => + Infix(parts.unexpr, params.unexpr, pure.unexpr, transparent.unexpr, quat.unexpr) + case Is[Tuple]('{ Tuple.apply($values) }) => Tuple(values.unexpr) + case Is[CaseClass](ast) => unliftCaseClass(ast) + case Is[IterableOperation](unliftTraversableOperation(o)) => o // TODO Is the matching covariant? 
In that case can do "case '{ $oo: OptionOperation } and then strictly throw an error" case Is[OptionOperation](ast) => unliftOptionOperation(ast) case Is[Assignment](ast) => unliftAssignment(ast) @@ -355,8 +379,8 @@ object Unlifter { } given unliftCaseClass: NiceUnliftable[CaseClass] with { - def unlift = { - case '{ CaseClass(${ name }, ${ values }: List[(String, Ast)]) } => CaseClass(name.unexpr, values.unexpr) + def unlift = { case '{ CaseClass(${ name }, ${ values }: List[(String, Ast)]) } => + CaseClass(name.unexpr, values.unexpr) } } @@ -402,8 +426,14 @@ object Unlifter { given quatProductUnliftable: NiceUnliftable[Quat.Product] with { def unlift = { - case '{ Quat.Product.WithRenamesCompact.apply($name, $tpe)(${ Varargs(fields) }: _*)(${ Varargs(values) }: _*)(${ Varargs(renamesFrom) }: _*)(${ Varargs(renamesTo) }: _*) } => - Quat.Product.WithRenamesCompact(name.unexpr, tpe.unexpr)(fields.unexprSeq: _*)(values.unexprSeq: _*)(renamesFrom.unexprSeq: _*)(renamesTo.unexprSeq: _*) + case '{ + Quat.Product.WithRenamesCompact.apply($name, $tpe)(${ Varargs(fields) }: _*)(${ Varargs(values) }: _*)(${ + Varargs(renamesFrom) + }: _*)(${ Varargs(renamesTo) }: _*) + } => + Quat.Product.WithRenamesCompact(name.unexpr, tpe.unexpr)(fields.unexprSeq: _*)(values.unexprSeq: _*)( + renamesFrom.unexprSeq: _* + )(renamesTo.unexprSeq: _*) } } diff --git a/quill-sql/src/main/scala/io/getquill/parser/engine/History.scala b/quill-sql/src/main/scala/io/getquill/parser/engine/History.scala index 70f89ef05..dd06e9e92 100644 --- a/quill-sql/src/main/scala/io/getquill/parser/engine/History.scala +++ b/quill-sql/src/main/scala/io/getquill/parser/engine/History.scala @@ -11,8 +11,8 @@ sealed trait HistoryPart extends History { } object History { - case class Failed(expr: String, parent: History) extends History { val name = "Failed" } + case class Failed(expr: String, parent: History) extends History { val name = "Failed" } case class Matched(chain: ParserChain, parent: History)(expr: => String) extends HistoryPart case class Ignored(chain: ParserChain, parent: History)(expr: => String) extends HistoryPart - case object Root extends History { val name = "Root" } + case object Root extends History { val name = "Root" } } diff --git a/quill-sql/src/main/scala/io/getquill/parser/engine/Parser.scala b/quill-sql/src/main/scala/io/getquill/parser/engine/Parser.scala index 1e257de53..e8166e5e1 100644 --- a/quill-sql/src/main/scala/io/getquill/parser/engine/Parser.scala +++ b/quill-sql/src/main/scala/io/getquill/parser/engine/Parser.scala @@ -7,7 +7,7 @@ import io.getquill.parser.ParserHelpers trait Parser(rootParse: Parser | Parser.Nil)(using Quotes) { import quotes.reflect._ def apply(input: Expr[_])(using History): Ast = attempt.lift(input).getOrElse(error(input)) - protected def error(input: Expr[_]): Nothing = failParse(input, classOf[Ast]) + protected def error(input: Expr[_]): Nothing = failParse(input, classOf[Ast]) protected def attempt: History ?=> PartialFunction[Expr[_], Ast] // Attempt the parser externally. Usually this is the just the `attempt` method // but in some cases we might want early-exist functionality. @@ -23,19 +23,25 @@ object Parser { protected def attempt: History ?=> PartialFunction[Expr[_], Ast] = PartialFunction.empty } - /** Optimizes 'Clause' by checking if it is some given type first. Otherwise can early-exit */ + /** + * Optimizes 'Clause' by checking if it is some given type first. 
Otherwise + * can early-exit + */ trait PrefilterType[Criteria: Type](using Quotes) extends Parser { import quotes.reflect._ def prefilter(expr: Expr[_]): Boolean = expr.asTerm.tpe <:< TypeRepr.of[Criteria] } - /** Optimizes 'Clause' by allowing a more efficient 'prematch' criteria to be used */ + /** + * Optimizes 'Clause' by allowing a more efficient 'prematch' criteria to be + * used + */ trait Prefilter(using Quotes) extends Parser { def prefilter(expr: Expr[_]): Boolean private[engine] override def attemptProper: History ?=> PartialFunction[Expr[_], Ast] = new PartialFunction[Expr[_], Ast] { - def apply(expr: Expr[_]): Ast = attempt.apply(expr) + def apply(expr: Expr[_]): Ast = attempt.apply(expr) def isDefinedAt(expr: Expr[_]): Boolean = prefilter(expr) && attempt.isDefinedAt(expr) } } diff --git a/quill-sql/src/main/scala/io/getquill/parser/engine/ParserChain.scala b/quill-sql/src/main/scala/io/getquill/parser/engine/ParserChain.scala index e79691bff..e95e715da 100644 --- a/quill-sql/src/main/scala/io/getquill/parser/engine/ParserChain.scala +++ b/quill-sql/src/main/scala/io/getquill/parser/engine/ParserChain.scala @@ -23,28 +23,31 @@ object ParserChain { def attempt[P <: Parser: ClassTag](rootInjector: Parser => P)(using Quotes, TranspileConfig): ParserChain = Attempt[P](rootInjector) - private final case class Attempt[P <: Parser: ClassTag](rootInjector: Parser => P)(using Quotes, TranspileConfig) extends ParserChain { - lazy val name = summon[ClassTag[P]].runtimeClass.getSimpleName + private final case class Attempt[P <: Parser: ClassTag](rootInjector: Parser => P)(using Quotes, TranspileConfig) + extends ParserChain { + lazy val name = summon[ClassTag[P]].runtimeClass.getSimpleName protected def build(rootParse: Parser) = rootInjector(rootParse) } - private final case class OrElse(left: ParserChain, right: ParserChain)(using Quotes, TranspileConfig) extends ParserChain { + private final case class OrElse(left: ParserChain, right: ParserChain)(using Quotes, TranspileConfig) + extends ParserChain { def name = s"${left.name}_or_${right.name}" protected def build(rootParse: Parser): Parser = new Parser(rootParse) { def attempt = { val leftOrRightMatch: PartialFunction[Expr[_], Option[Ast]] = - PartialFunction.fromFunction[Expr[_], Option[Ast]](expr => { - val leftParser = left.build(rootParse) + PartialFunction.fromFunction[Expr[_], Option[Ast]] { expr => + val leftParser = left.build(rootParse) val rightParser = right.build(rootParse) - val history = summon[History] + val history = summon[History] val leftHistory = History.Matched(left, history)(Format.Expr(expr)) // if the left side parser did not match, that means that it was ignored so add that info to the history - val rightHistory = History.Matched(right, History.Ignored(left, history)(Format.Expr(expr)))(Format.Expr(expr)) - val leftLift: Expr[_] => Option[Ast] = leftParser.attemptProper(using leftHistory).lift + val rightHistory = + History.Matched(right, History.Ignored(left, history)(Format.Expr(expr)))(Format.Expr(expr)) + val leftLift: Expr[_] => Option[Ast] = leftParser.attemptProper(using leftHistory).lift val rightLift: Expr[_] => Option[Ast] = rightParser.attemptProper(using rightHistory).lift leftLift(expr).orElse(rightLift(expr)) - }) + } leftOrRightMatch.unlift } // end attempt } diff --git a/quill-sql/src/main/scala/io/getquill/parser/engine/failParse.scala b/quill-sql/src/main/scala/io/getquill/parser/engine/failParse.scala index 5347d1ba0..52a4b76ca 100644 --- 
a/quill-sql/src/main/scala/io/getquill/parser/engine/failParse.scala +++ b/quill-sql/src/main/scala/io/getquill/parser/engine/failParse.scala @@ -29,13 +29,13 @@ object failParse { val traces = Thread.currentThread.getStackTrace.take(50).map(" " + _.toString).mkString("\n") report.throwError( s"""| - |s"==== ${message} ==== - | ${Format(Printer.TreeShortCode.show(term)) /* Or Maybe just expr? */} - |==== Extractors === - | ${Format(Printer.TreeStructure.show(term))} - |==== Stacktrace === - |${traces}""".stripMargin, + |==== ${message} ==== + | ${Format(Printer.TreeShortCode.show(term)) /* Or Maybe just expr? */} + |==== Extractors === + | ${Format(Printer.TreeStructure.show(term))} + |==== Stacktrace === + |${traces}""".stripMargin, expr ) } // end apply -} // end failParse +} // end failParse diff --git a/quill-sql/src/main/scala/io/getquill/quat/QuatMaking.scala b/quill-sql/src/main/scala/io/getquill/quat/QuatMaking.scala index e81c88ca4..ce6c91105 100644 --- a/quill-sql/src/main/scala/io/getquill/quat/QuatMaking.scala +++ b/quill-sql/src/main/scala/io/getquill/quat/QuatMaking.scala @@ -71,7 +71,7 @@ trait QuatMaking extends QuatMakingBase { override def existsEncoderFor(using Quotes)(tpe: quotes.reflect.TypeRepr): Boolean = { import quotes.reflect._ // TODO Try summoning 'value' to know it's a value for sure if an encoder doesn't exist? - def encoderComputation() = { + def encoderComputation() = tpe.asType match { // If an identifier in the Quill query has an Encoder/Decoder pair, we treat it as a value i.e. Quat.Value is assigned as its Quat. // however, what do we do if there is only one. Say for Name(value: String), Person(name: Name, age: Int) there is a Name-Decoder @@ -110,7 +110,6 @@ trait QuatMaking extends QuatMakingBase { case _ => false } - } val output = QuatMaking.lookupIsEncodeable(tpe.widen)(encoderComputation) output @@ -139,7 +138,8 @@ trait QuatMakingBase { def nonGenericMethods(using Quotes)(tpe: quotes.reflect.TypeRepr) = tpe.classSymbol.get.memberFields - .filter(m => m.owner.name.toString != "Any" && m.owner.name.toString != "Object").map { param => + .filter(m => m.owner.name.toString != "Any" && m.owner.name.toString != "Object") + .map { param => ( param.name.toString, tpe.memberType(param).simplified @@ -147,7 +147,8 @@ trait QuatMakingBase { // Look up the parameter only if needed.
This is typically an expensive operation // if (!param.isParameter) param.typeSignature else param.typeSignature.asSeenFrom(tpe, tpe.typeSymbol) ) - }.toList + } + .toList def caseClassConstructorArgs(using Quotes)(tpe: quotes.reflect.TypeRepr) = { import io.getquill.util.Format @@ -165,7 +166,9 @@ trait QuatMakingBase { } object ArbitraryBaseType { - def unapply(using Quotes)(tpe: quotes.reflect.TypeRepr): Option[(String, List[(String, quotes.reflect.TypeRepr)])] = + def unapply(using + Quotes + )(tpe: quotes.reflect.TypeRepr): Option[(String, List[(String, quotes.reflect.TypeRepr)])] = if (tpe.classSymbol.isDefined) Some((tpe.widen.typeSymbol.name.toString, nonGenericMethods(tpe.widen))) else @@ -180,7 +183,9 @@ trait QuatMakingBase { } object CaseClassBaseType { - def unapply(using Quotes)(tpe: quotes.reflect.TypeRepr): Option[(String, List[(String, quotes.reflect.TypeRepr)])] = + def unapply(using + Quotes + )(tpe: quotes.reflect.TypeRepr): Option[(String, List[(String, quotes.reflect.TypeRepr)])] = if (tpe.classSymbol.isDefined && tpe.widen.typeSymbol.isCaseClass) Some((tpe.widen.typeSymbol.name.toString, caseClassConstructorArgs(tpe.widen))) else @@ -289,7 +294,12 @@ trait QuatMakingBase { import quotes.reflect._ tpe match { case CaseClassBaseType(name, fields) if !existsEncoderFor(tpe) || tpe <:< TypeRepr.of[Udt] => - Some(Quat.Product(name, fields.map { case (fieldName, fieldType) => (fieldName, ParseType.parseType(fieldType)) })) + Some( + Quat.Product( + name, + fields.map { case (fieldName, fieldType) => (fieldName, ParseType.parseType(fieldType)) } + ) + ) case _ => None } @@ -315,7 +325,9 @@ trait QuatMakingBase { Some(tpe) else if (tpe <:< TypeRepr.of[Udt]) None - else if (isType[AnyVal](tpe) && tpe.widen.typeSymbol.isCaseClass && anyValBehavior == AnyValBehavior.TreatAsValue) + else if ( + isType[AnyVal](tpe) && tpe.widen.typeSymbol.isCaseClass && anyValBehavior == AnyValBehavior.TreatAsValue + ) Some(tpe) else if (existsEncoderFor(tpe)) Some(tpe) @@ -410,9 +422,13 @@ trait QuatMakingBase { Expr.summon[Mirror.Of[T]] match { case Some(ev) => ev match { - case '{ $m: Mirror.SumOf[T] { type MirroredElemLabels = elementLabels; type MirroredElemTypes = elementTypes } } => + case '{ + $m: Mirror.SumOf[T] { + type MirroredElemLabels = elementLabels; type MirroredElemTypes = elementTypes + } + } => val coproductQuats = traverseCoproduct[elementTypes](TypeRepr.of[T])(Type.of[elementTypes]) - val reduced = coproductQuats.reduce((q1, q2) => mergeQuats(q1, q2)) + val reduced = coproductQuats.reduce((q1, q2) => mergeQuats(q1, q2)) Some(reduced) case _ => None @@ -477,7 +493,10 @@ trait QuatMakingBase { case (key, Some(first), Some(second)) => (key, mergeQuats(first, second)) case (key, Some(first), None) => (key, first) case (key, None, Some(second)) => (key, second) - case (key, None, None) => throw new IllegalArgumentException(s"Invalid state for Quat key ${key}, both values of merging quats were null") + case (key, None, None) => + throw new IllegalArgumentException( + s"Invalid state for Quat key ${key}, both values of merging quats were null" + ) } Quat.Product(second.name, newFields) @@ -485,7 +504,8 @@ trait QuatMakingBase { firstQuat.leastUpperType(secondQuat) match { case Some(value) => value // TODO Get field names for these quats if they are inside something else? 
- case None => throw new IllegalArgumentException(s"Could not create coproduct by merging quats ${q1} and ${q2}") + case None => + throw new IllegalArgumentException(s"Could not create coproduct by merging quats ${q1} and ${q2}") } } } // end CoProduct @@ -530,4 +550,4 @@ trait QuatMakingBase { } } // end InferQuat -} // end QuatMakingBase +} // end QuatMakingBase diff --git a/quill-sql/src/main/scala/io/getquill/util/Format.scala b/quill-sql/src/main/scala/io/getquill/util/Format.scala index 349df7cc3..393179590 100644 --- a/quill-sql/src/main/scala/io/getquill/util/Format.scala +++ b/quill-sql/src/main/scala/io/getquill/util/Format.scala @@ -48,7 +48,10 @@ object Format { } } - /** Same as TypeRepr but also widens the type since frequently types are singleton i.e. 'person.name' has the type 'name' as opposed to String */ + /** + * Same as TypeRepr but also widens the type since frequently types are + * singleton i.e. 'person.name' has the type 'name' as opposed to String + */ object TypeReprW { def apply(typeRepr: Quotes#reflectModule#TypeRepr)(using qctx: Quotes) = { import qctx.reflect._ @@ -111,8 +114,8 @@ object Format { def apply(code: String, showErrorTrace: Boolean = false) = { val encosedCode = s"""|object DummyEnclosure { - | ${code} - |}""".stripMargin + | ${code} + |}""".stripMargin // NOTE: Very inefficient way to get rid of DummyEnclosure on large blocks of code // use only for debugging purposes! diff --git a/quill-sql/src/main/scala/io/getquill/util/Interpolator2.scala b/quill-sql/src/main/scala/io/getquill/util/Interpolator2.scala index ec6c61a48..804341b49 100644 --- a/quill-sql/src/main/scala/io/getquill/util/Interpolator2.scala +++ b/quill-sql/src/main/scala/io/getquill/util/Interpolator2.scala @@ -16,19 +16,20 @@ object TraceConfig { } class Interpolator2( - traceType: TraceType, - traceConfig: TraceConfig, - defaultIndent: Int = 0, - color: Boolean = Messages.traceColors, - qprint: AstPrinter = Messages.qprint, - out: PrintStream = System.out, - globalTracesEnabled: (TraceType) => Boolean = Messages.tracesEnabled(_) + traceType: TraceType, + traceConfig: TraceConfig, + defaultIndent: Int = 0, + color: Boolean = Messages.traceColors, + qprint: AstPrinter = Messages.qprint, + out: PrintStream = System.out, + globalTracesEnabled: (TraceType) => Boolean = Messages.tracesEnabled(_) ) { implicit class InterpolatorExt(sc: StringContext) { def trace(elements: Any*) = new Traceable(sc, elements) } - def tracesEnabled(traceType: TraceType) = traceConfig.enabledTraces.contains(traceType) || globalTracesEnabled(traceType) + def tracesEnabled(traceType: TraceType) = + traceConfig.enabledTraces.contains(traceType) || globalTracesEnabled(traceType) class Traceable(sc: StringContext, elementsSeq: Seq[Any]) { @@ -36,9 +37,9 @@ class Interpolator2( private sealed trait PrintElement private case class Str(str: String, first: Boolean) extends PrintElement - private case class Elem(value: String) extends PrintElement - private case class Simple(value: String) extends PrintElement - private case object Separator extends PrintElement + private case class Elem(value: String) extends PrintElement + private case class Simple(value: String) extends PrintElement + private case object Separator extends PrintElement implicit class StrOps(str: String) { def reallyFitsOnOneLine: Boolean = { @@ -64,7 +65,7 @@ class Interpolator2( private def generateStringForCommand(value: Any, indent: Int) = { val objectString = qprint(value).string(color) - val oneLine = objectString.reallyFitsOnOneLine + val
oneLine = objectString.reallyFitsOnOneLine oneLine match { case true => s"${indent.prefix}> ${objectString}" case false => @@ -82,17 +83,17 @@ class Interpolator2( sealed trait Splice { def value: String } object Splice { case class Simple(value: String) extends Splice // Simple splice into the string, don't indent etc... - case class Show(value: String) extends Splice // Indent, colorize the element etc... + case class Show(value: String) extends Splice // Indent, colorize the element etc... } private def readBuffers() = { def orZero(i: Int): Int = if (i < 0) 0 else i val parts = sc.parts.iterator.toList - val elements = elementsSeq.toList.map(elem => { + val elements = elementsSeq.toList.map { elem => if (elem.isInstanceOf[String]) Splice.Simple(elem.asInstanceOf[String]) else Splice.Show(qprint(elem).string(color)) - }) + } val (firstStr, explicitIndent) = readFirst(parts.head) val indent = @@ -183,7 +184,7 @@ class Interpolator2( command } - def andReturn[T](command: => T) = { + def andReturn[T](command: => T) = logIfEnabled() match { case Some((output, indent)) => // do the initial log @@ -196,9 +197,8 @@ class Interpolator2( case None => command } - } - def andReturnLog[T, L](command: => (T, L)) = { + def andReturnLog[T, L](command: => (T, L)) = logIfEnabled() match { case Some((output, indent)) => // do the initial log @@ -211,9 +211,8 @@ class Interpolator2( case None => command } - } - def andReturnIf[T](command: => T)(showIf: T => Boolean) = { + def andReturnIf[T](command: => T)(showIf: T => Boolean) = logIfEnabled() match { case Some((output, indent)) => // Even though we usually want to evaluate the command after the initial log was done @@ -232,6 +231,5 @@ class Interpolator2( case None => command } - } } } diff --git a/quill-sql/src/main/scala/io/getquill/util/LoadObject.scala b/quill-sql/src/main/scala/io/getquill/util/LoadObject.scala index 2c9442920..1a71f56c1 100644 --- a/quill-sql/src/main/scala/io/getquill/util/LoadObject.scala +++ b/quill-sql/src/main/scala/io/getquill/util/LoadObject.scala @@ -19,14 +19,14 @@ object Load { private[Load] sealed trait SymbolLoadType { def path: String } private[Load] object SymbolLoadType { - case class Class(path: String) extends SymbolLoadType + case class Class(path: String) extends SymbolLoadType case class Module(path: String) extends SymbolLoadType } object Module { def fromClassTag[T](implicit tag: ClassTag[T]): Try[T] = Try { - val cls = java.lang.Class.forName(`endWith$`(tag.runtimeClass.getName)) + val cls = java.lang.Class.forName(`endWith$`(tag.runtimeClass.getName)) val field = cls.getField("MODULE$") field.get(cls).asInstanceOf[T] } @@ -37,7 +37,7 @@ object Load { sym <- symbolType(loadClassType) objectLoad <- Try { - val className = sym.path + val className = sym.path val clsFullRaw = `endWith$`(className) // TODO This is a hack! 
Need to actually use scala compile-time tpe.memberType(tpe.owner) over and over @@ -45,7 +45,7 @@ object Load { // Replace io.getquill.Foo$.Bar$ with io.getquill.Foo$Bar which is the java convention for nested modules val clsFull = clsFullRaw.replace("$.", "$") - val cls = java.lang.Class.forName(clsFull) + val cls = java.lang.Class.forName(clsFull) val field = cls.getField("MODULE$") field.get(cls) } @@ -66,7 +66,11 @@ object Load { sym <- symLoad match { case SymbolLoadType.Module(path) => - Failure(throw new IllegalArgumentException(s"${Format.TypeRepr(loadClassType)} must not be a class type because it has no class symbol.")) + Failure( + throw new IllegalArgumentException( + s"${Format.TypeRepr(loadClassType)} must not be a class type because it has no class symbol." + ) + ) case SymbolLoadType.Class(path) => Success(path) } @@ -77,18 +81,23 @@ object Load { private[Load] def symbolType(using Quotes)(loadClassType: quotes.reflect.TypeRepr): Try[SymbolLoadType] = { val traceConfig = SummonTranspileConfig().traceConfig - val interp = new Interpolator(TraceType.Warning, traceConfig, 1) + val interp = new Interpolator(TraceType.Warning, traceConfig, 1) import interp._ Try { loadClassType.classSymbol match { case Some(value) => Success(SymbolLoadType.Class(value.fullName)) case None => - trace"${Format.TypeRepr(loadClassType)} must not be a class type because it has no class symbol. Attempting to load it as a module.".andLog() + trace"${Format.TypeRepr(loadClassType)} must not be a class type because it has no class symbol. Attempting to load it as a module." + .andLog() if (!loadClassType.termSymbol.moduleClass.isNoSymbol) Success(SymbolLoadType.Module(loadClassType.termSymbol.moduleClass.fullName)) else - Failure(new IllegalArgumentException(s"The class ${Format.TypeRepr(loadClassType.widen)} cannot be loaded because it not a static module. Either it is a class or some other dynamic value.")) + Failure( + new IllegalArgumentException( + s"The class ${Format.TypeRepr(loadClassType.widen)} cannot be loaded because it is not a static module. Either it is a class or some other dynamic value."
+ ) + ) } }.flatten } diff --git a/quill-sql/src/main/scala/io/getquill/util/LoadObjectTest.scala b/quill-sql/src/main/scala/io/getquill/util/LoadObjectTest.scala index 636411be7..1d1b6c68a 100644 --- a/quill-sql/src/main/scala/io/getquill/util/LoadObjectTest.scala +++ b/quill-sql/src/main/scala/io/getquill/util/LoadObjectTest.scala @@ -2,6 +2,5 @@ package io.getquill.util import io.getquill.Literal -@main def testLoad() = { +@main def testLoad() = loadMac[Literal] -} diff --git a/quill-sql/src/main/scala/io/getquill/util/ProtoMessages.scala b/quill-sql/src/main/scala/io/getquill/util/ProtoMessages.scala index 5e39050ce..aea0ad657 100644 --- a/quill-sql/src/main/scala/io/getquill/util/ProtoMessages.scala +++ b/quill-sql/src/main/scala/io/getquill/util/ProtoMessages.scala @@ -10,14 +10,18 @@ object ProtoMessages { private def variable(propName: String, envName: String, default: String) = Option(System.getProperty(propName)).orElse(sys.env.get(envName)).getOrElse(default) - private[getquill] def resetCache(): Unit = cacheMap.clear() + private[getquill] def resetCache(): Unit = cacheMap.clear() private val cacheMap: MutableMap[String, Any] = MutableMap() private def cache[T](name: String, value: => T): T = cacheMap.getOrElseUpdate(name, value).asInstanceOf[T] - private[getquill] def useStdOut = cache("quill.macro.stdout", variable("quill.macro.stdout", "quill_macro_stdout", "false").toBoolean) - private[getquill] def serializeAst = cache("quill.ast.serialize", variable("quill.ast.serialize", "quill_ast_serialize", "true").toBoolean) - private[getquill] def maxQuatFields = cache("quill.quat.tooManyFields", variable("quill.quat.tooManyFields", "quill_quat_tooManyFields", "4").toInt) - private[getquill] def errorDetail = cache("quill.error.detail", variable("quill.error.detail", "quill_error_detail", "false").toBoolean) + private[getquill] def useStdOut = + cache("quill.macro.stdout", variable("quill.macro.stdout", "quill_macro_stdout", "false").toBoolean) + private[getquill] def serializeAst = + cache("quill.ast.serialize", variable("quill.ast.serialize", "quill_ast_serialize", "true").toBoolean) + private[getquill] def maxQuatFields = + cache("quill.quat.tooManyFields", variable("quill.quat.tooManyFields", "quill_quat_tooManyFields", "4").toInt) + private[getquill] def errorDetail = + cache("quill.error.detail", variable("quill.error.detail", "quill_error_detail", "false").toBoolean) } // end ProtoMessages diff --git a/quill-sql/src/main/scala/io/getquill/util/SummonMac.scala b/quill-sql/src/main/scala/io/getquill/util/SummonMac.scala index 87d924f97..d43ac8d52 100644 --- a/quill-sql/src/main/scala/io/getquill/util/SummonMac.scala +++ b/quill-sql/src/main/scala/io/getquill/util/SummonMac.scala @@ -23,7 +23,10 @@ object SummonMac { actualType match { case '[t] => val loaded = - Load.Module[t].getOrElse { report.throwError(s"Could not summon genie of type: ${Format.TypeOf[t]}") }.asInstanceOf[Genie] + Load + .Module[t] + .getOrElse(report.throwError(s"Could not summon genie of type: ${Format.TypeOf[t]}")) + .asInstanceOf[Genie] println("My Greeting Is: " + loaded.greet) } case None => diff --git a/quill-sql/src/main/scala/io/getquill/util/debug/PrintMac.scala b/quill-sql/src/main/scala/io/getquill/util/debug/PrintMac.scala index a816fb8b7..2a9e5f645 100644 --- a/quill-sql/src/main/scala/io/getquill/util/debug/PrintMac.scala +++ b/quill-sql/src/main/scala/io/getquill/util/debug/PrintMac.scala @@ -8,13 +8,23 @@ import io.getquill.metaprog.DeserializeAstInstances object PrintMac { - inline def 
apply(inline any: Any, inline showDetail: Boolean = false, inline deserializeAst: Boolean = false): Unit = ${ printMacImpl('any, 'showDetail, 'deserializeAst) } - inline def passthrough(inline any: Any, inline showDetail: Boolean = false, inline deserializeAst: Boolean = false): Any = ${ printMacImpl('any, 'showDetail, 'deserializeAst) } - - def printMacImpl(anyRaw: Expr[Any], showDetailRaw: Expr[Boolean], deserializeAstRaw: Expr[Boolean])(using Quotes): Expr[Any] = { + inline def apply(inline any: Any, inline showDetail: Boolean = false, inline deserializeAst: Boolean = false): Unit = + ${ printMacImpl('any, 'showDetail, 'deserializeAst) } + inline def passthrough( + inline any: Any, + inline showDetail: Boolean = false, + inline deserializeAst: Boolean = false + ): Any = ${ printMacImpl('any, 'showDetail, 'deserializeAst) } + + def printMacImpl(anyRaw: Expr[Any], showDetailRaw: Expr[Boolean], deserializeAstRaw: Expr[Boolean])(using + Quotes + ): Expr[Any] = { import quotes.reflect._ - val showDetail = Expr.unapply(deserializeAstRaw).getOrElse { report.throwError("showDetail must be a constant value true/false") } - val deserializeAst = Expr.unapply(deserializeAstRaw).getOrElse { report.throwError("deserializeAst must be a constant value true/false") } + val showDetail = + Expr.unapply(showDetailRaw).getOrElse(report.throwError("showDetail must be a constant value true/false")) + val deserializeAst = Expr.unapply(deserializeAstRaw).getOrElse { + report.throwError("deserializeAst must be a constant value true/false") + } val any = anyRaw.asTerm.underlyingArgument.asExpr val deser = diff --git a/quill-sql/src/main/scala/io/getquill/util/prep/Hierarchies.scala b/quill-sql/src/main/scala/io/getquill/util/prep/Hierarchies.scala index fa7d0483a..931d99364 100644 --- a/quill-sql/src/main/scala/io/getquill/util/prep/Hierarchies.scala +++ b/quill-sql/src/main/scala/io/getquill/util/prep/Hierarchies.scala @@ -6,20 +6,20 @@ object Mod { // val inst = Inst("instValValue") // def inst - def modAp() = "modApValue" - def modDef = "modDefValue" - val modVal = "modValValue" + def modAp() = "modApValue" + def modDef = "modDefValue" + val modVal = "modValValue" val modIntVal = 123 object Foo { def fooAp() = "fooApValue" - def fooDef = "fooDefValue" - val fooVal = "fooValValue" + def fooDef = "fooDefValue" + val fooVal = "fooValValue" object Bar { def barAp() = "barApValue" - def barDef = "barDefValue" - val barVal = "barValValue" + def barDef = "barDefValue" + val barVal = "barValValue" } } } diff --git a/quill-sql/src/test/scala/io/getquill/BatchActionMultiTest.scala b/quill-sql/src/test/scala/io/getquill/BatchActionMultiTest.scala index deec1c378..ffe7c4442 100644 --- a/quill-sql/src/test/scala/io/getquill/BatchActionMultiTest.scala +++ b/quill-sql/src/test/scala/io/getquill/BatchActionMultiTest.scala @@ -18,12 +18,14 @@ import io.getquill.util.debug.PrintMac class BatchActionMultiTest extends Spec with Inside with SuperContext[PostgresDialect, Literal] { // Need to fully type this otherwise scala compiler thinks it's still just 'Context' from the super-class // and the extensions (m: MirrorContext[_, _]#BatchActionMirror) etc...
classes in Spec don't match their types correctly - val ctx: MirrorContext[PostgresDialect, Literal] = new MirrorContext[PostgresDialect, Literal](PostgresDialect, Literal) + val ctx: MirrorContext[PostgresDialect, Literal] = + new MirrorContext[PostgresDialect, Literal](PostgresDialect, Literal) import ctx._ "Multi-row Batch Action Should work with" - { "inserts > batch-size - (2rows + 2rows) + (1row)" - { - val people = List(Person(1, "A", 111), Person(2, "B", 222), Person(3, "C", 333), Person(4, "D", 444), Person(5, "E", 555)) + val people = + List(Person(1, "A", 111), Person(2, "B", 222), Person(3, "C", 333), Person(4, "D", 444), Person(5, "E", 555)) def expect(executionType: ExecutionType) = List( ( @@ -61,12 +63,21 @@ class BatchActionMultiTest extends Spec with Inside with SuperContext[PostgresDi ) ) "static - mixed" in { - val static = ctx.run(liftQuery(people).foreach(p => query[Person].insert(_.id -> p.id, _.name -> (lift("foo") + p.name + "bar"), _.age -> p.age)), 2) + val static = ctx.run( + liftQuery(people).foreach(p => + query[Person].insert(_.id -> p.id, _.name -> (lift("foo") + p.name + "bar"), _.age -> p.age) + ), + 2 + ) static.tripleBatchMulti mustEqual expect2(ExecutionType.Static) } "dynamic - mixed" in { // TODO Why does it not print that a dynamic query is being run? - val q = quote(liftQuery(people).foreach(p => query[Person].insert(_.id -> p.id, _.name -> (lift("foo") + p.name + "bar"), _.age -> p.age))) + val q = quote( + liftQuery(people).foreach(p => + query[Person].insert(_.id -> p.id, _.name -> (lift("foo") + p.name + "bar"), _.age -> p.age) + ) + ) val static = ctx.run(q, 2) static.tripleBatchMulti mustEqual expect2(ExecutionType.Dynamic) } @@ -141,12 +152,19 @@ class BatchActionMultiTest extends Spec with Inside with SuperContext[PostgresDi "fallback for non-insert query (in a context that doesn't support update)" - { val ctx: MirrorContext[MySQLDialect, Literal] = new MirrorContext[MySQLDialect, Literal](MySQLDialect, Literal) import ctx._ - val people = List(Person(1, "A", 111), Person(2, "B", 222), Person(3, "C", 333), Person(4, "D", 444), Person(5, "E", 555)) + val people = + List(Person(1, "A", 111), Person(2, "B", 222), Person(3, "C", 333), Person(4, "D", 444), Person(5, "E", 555)) def expect(executionType: ExecutionType) = List( ( "UPDATE Person pt SET id = ?, name = ?, age = ? 
WHERE pt.id = ?", - List(List(1, "A", 111, 1), List(2, "B", 222, 2), List(3, "C", 333, 3), List(4, "D", 444, 4), List(5, "E", 555, 5)), + List( + List(1, "A", 111, 1), + List(2, "B", 222, 2), + List(3, "C", 333, 3), + List(4, "D", 444, 4), + List(5, "E", 555, 5) + ), executionType ) ) @@ -163,7 +181,8 @@ class BatchActionMultiTest extends Spec with Inside with SuperContext[PostgresDi } "update query" - { - val people = List(Person(1, "A", 111), Person(2, "B", 222), Person(3, "C", 333), Person(4, "D", 444), Person(5, "E", 555)) + val people = + List(Person(1, "A", 111), Person(2, "B", 222), Person(3, "C", 333), Person(4, "D", 444), Person(5, "E", 555)) def expect(executionType: ExecutionType) = List( ( @@ -190,7 +209,8 @@ class BatchActionMultiTest extends Spec with Inside with SuperContext[PostgresDi } "supported contexts" - { - val people = List(Person(1, "A", 111), Person(2, "B", 222), Person(3, "C", 333), Person(4, "D", 444), Person(5, "E", 555)) + val people = + List(Person(1, "A", 111), Person(2, "B", 222), Person(3, "C", 333), Person(4, "D", 444), Person(5, "E", 555)) def makeRow(executionType: ExecutionType)(queryA: String, queryB: String) = List( ( @@ -232,36 +252,55 @@ class BatchActionMultiTest extends Spec with Inside with SuperContext[PostgresDi "postgres - regular/returning" in { val ctx: MirrorContext[PostgresDialect, Literal] = new MirrorContext(PostgresDialect, Literal) import ctx._ - ctx.run(liftQuery(people).foreach(p => insertPeople(p)), 2).tripleBatchMulti mustEqual expect(ExecutionType.Static) - ctx.run(liftQuery(people).foreach(p => insertPeople(p).returning(_.id)), 2).tripleBatchMulti mustEqual expectPostgresReturning(ExecutionType.Static) + ctx.run(liftQuery(people).foreach(p => insertPeople(p)), 2).tripleBatchMulti mustEqual expect( + ExecutionType.Static + ) + ctx + .run(liftQuery(people).foreach(p => insertPeople(p).returning(_.id)), 2) + .tripleBatchMulti mustEqual expectPostgresReturning(ExecutionType.Static) } "sqlserver - regular/returning" in { val ctx: MirrorContext[SQLServerDialect, Literal] = new MirrorContext(SQLServerDialect, Literal) import ctx._ - ctx.run(liftQuery(people).foreach(p => insertPeople(p)), 2).tripleBatchMulti mustEqual expect(ExecutionType.Static) - ctx.run(liftQuery(people).foreach(p => insertPeople(p).returning(_.id)), 2).tripleBatchMulti mustEqual expectSqlServerReturning(ExecutionType.Static) + ctx.run(liftQuery(people).foreach(p => insertPeople(p)), 2).tripleBatchMulti mustEqual expect( + ExecutionType.Static + ) + ctx + .run(liftQuery(people).foreach(p => insertPeople(p).returning(_.id)), 2) + .tripleBatchMulti mustEqual expectSqlServerReturning(ExecutionType.Static) } "mysql - regular/returning" in { val ctx: MirrorContext[MySQLDialect, Literal] = new MirrorContext(MySQLDialect, Literal) import ctx._ - ctx.run(liftQuery(people).foreach(p => insertPeople(p)), 2).tripleBatchMulti mustEqual expect(ExecutionType.Static) - ctx.run(liftQuery(people).foreach(p => insertPeople(p).returning(_.id)), 2).tripleBatchMulti mustEqual expect(ExecutionType.Static) + ctx.run(liftQuery(people).foreach(p => insertPeople(p)), 2).tripleBatchMulti mustEqual expect( + ExecutionType.Static + ) + ctx.run(liftQuery(people).foreach(p => insertPeople(p).returning(_.id)), 2).tripleBatchMulti mustEqual expect( + ExecutionType.Static + ) } "h2 - regular/returning" in { val ctx: MirrorContext[H2Dialect, Literal] = new MirrorContext(H2Dialect, Literal) import ctx._ - ctx.run(liftQuery(people).foreach(p => insertPeople(p)), 2).tripleBatchMulti mustEqual 
expectH2(ExecutionType.Static) - ctx.run(liftQuery(people).foreach(p => insertPeople(p).returning(_.id)), 2).tripleBatchMulti mustEqual expectH2(ExecutionType.Static) + ctx.run(liftQuery(people).foreach(p => insertPeople(p)), 2).tripleBatchMulti mustEqual expectH2( + ExecutionType.Static + ) + ctx.run(liftQuery(people).foreach(p => insertPeople(p).returning(_.id)), 2).tripleBatchMulti mustEqual expectH2( + ExecutionType.Static + ) } "sqlite - only regular" in { val ctx: MirrorContext[SqliteDialect, Literal] = new MirrorContext(SqliteDialect, Literal) import ctx._ - ctx.run(liftQuery(people).foreach(p => insertPeople(p)), 2).tripleBatchMulti mustEqual expect(ExecutionType.Static) + ctx.run(liftQuery(people).foreach(p => insertPeople(p)), 2).tripleBatchMulti mustEqual expect( + ExecutionType.Static + ) } } "fallback for non-supported context" - { - val people = List(Person(1, "A", 111), Person(2, "B", 222), Person(3, "C", 333), Person(4, "D", 444), Person(5, "E", 555)) + val people = + List(Person(1, "A", 111), Person(2, "B", 222), Person(3, "C", 333), Person(4, "D", 444), Person(5, "E", 555)) def expect(executionType: ExecutionType) = List( ( @@ -272,7 +311,8 @@ class BatchActionMultiTest extends Spec with Inside with SuperContext[PostgresDi ) "oracle" - { - val ctx: MirrorContext[OracleDialect, Literal] = new MirrorContext[OracleDialect, Literal](OracleDialect, Literal) + val ctx: MirrorContext[OracleDialect, Literal] = + new MirrorContext[OracleDialect, Literal](OracleDialect, Literal) import ctx._ "static" in { val static = ctx.run(liftQuery(people).foreach(p => insertPeople(p)), 2) @@ -284,7 +324,8 @@ class BatchActionMultiTest extends Spec with Inside with SuperContext[PostgresDi } } "sqlite - with returning clause" - { - val ctx: MirrorContext[OracleDialect, Literal] = new MirrorContext[OracleDialect, Literal](OracleDialect, Literal) + val ctx: MirrorContext[OracleDialect, Literal] = + new MirrorContext[OracleDialect, Literal](OracleDialect, Literal) import ctx._ "static" in { val static = ctx.run(liftQuery(people).foreach(p => insertPeople(p).returning(_.id)), 2) diff --git a/quill-sql/src/test/scala/io/getquill/BatchActionTest.scala b/quill-sql/src/test/scala/io/getquill/BatchActionTest.scala index 320e298a4..374187110 100644 --- a/quill-sql/src/test/scala/io/getquill/BatchActionTest.scala +++ b/quill-sql/src/test/scala/io/getquill/BatchActionTest.scala @@ -37,18 +37,24 @@ trait SuperContext[D <: io.getquill.idiom.Idiom, N <: NamingStrategy] { class BatchActionTest extends Spec with Inside with SuperContext[MirrorSqlDialectWithReturnClause, Literal] { // Need to fully type this otherwise scala compiler thinks it's still just 'Context' from the super-class // and the extensions (m: MirrorContext[_, _]#BatchActionMirror) etc... classes in Spec don't match their types correctly - val ctx: MirrorContext[MirrorSqlDialectWithReturnClause, Literal] = new MirrorContext[MirrorSqlDialectWithReturnClause, Literal](MirrorSqlDialectWithReturnClause, Literal) + val ctx: MirrorContext[MirrorSqlDialectWithReturnClause, Literal] = + new MirrorContext[MirrorSqlDialectWithReturnClause, Literal](MirrorSqlDialectWithReturnClause, Literal) import ctx._ val people = List(Person(1, "Joe", 123), Person(2, "Jill", 456)) "batch action with returning should work with" - { "insert - returning" in { - val mirror = ctx.run { liftQuery(people).foreach(p => query[Person].insertValue(p).returning(p => p.id)) } - mirror.triple mustEqual ("INSERT INTO Person (id,name,age) VALUES (?, ?, ?) 
RETURNING id", List(List(1, "Joe", 123), List(2, "Jill", 456)), Static) + val mirror = ctx.run(liftQuery(people).foreach(p => query[Person].insertValue(p).returning(p => p.id))) + mirror.triple mustEqual ("INSERT INTO Person (id,name,age) VALUES (?, ?, ?) RETURNING id", List( + List(1, "Joe", 123), + List(2, "Jill", 456) + ), Static) } "insert - returningGenerated" in { - val mirror = ctx.run { liftQuery(people).foreach(p => query[Person].insertValue(p).returningGenerated(p => p.id)) } + val mirror = ctx.run { + liftQuery(people).foreach(p => query[Person].insertValue(p).returningGenerated(p => p.id)) + } mirror.triple mustEqual ( "INSERT INTO Person (name,age) VALUES (?, ?) RETURNING id", // The ids should be removed from the lifts list since their corresponding columns are removed (i.e. in the expanded insert assignments) @@ -59,8 +65,13 @@ class BatchActionTest extends Spec with Inside with SuperContext[MirrorSqlDialec // update returning with filter, not very useful but good baseline "update - returning" in { - val mirror = ctx.run { liftQuery(people).foreach(p => query[Person].filter(pf => pf.id == p.id).updateValue(p).returning(p => p.id)) } - mirror.triple mustEqual ("UPDATE Person AS pf SET id = ?, name = ?, age = ? WHERE pf.id = ? RETURNING id", List(List(1, "Joe", 123, 1), List(2, "Jill", 456, 2)), Static) + val mirror = ctx.run { + liftQuery(people).foreach(p => query[Person].filter(pf => pf.id == p.id).updateValue(p).returning(p => p.id)) + } + mirror.triple mustEqual ("UPDATE Person AS pf SET id = ?, name = ?, age = ? WHERE pf.id = ? RETURNING id", List( + List(1, "Joe", 123, 1), + List(2, "Jill", 456, 2) + ), Static) } // TODO dsl does not support this yet but would be quite useful @@ -76,138 +87,228 @@ class BatchActionTest extends Spec with Inside with SuperContext[MirrorSqlDialec liftQuery(people).foreach(p => query[Person].insertValue(p)) } val mirror = ctx.run(q) - mirror.triple mustEqual ("INSERT INTO Person (id,name,age) VALUES (?, ?, ?)", List(List(1, "Joe", 123), List(2, "Jill", 456)), Dynamic) + mirror.triple mustEqual ("INSERT INTO Person (id,name,age) VALUES (?, ?, ?)", List( + List(1, "Joe", 123), + List(2, "Jill", 456) + ), Dynamic) } "insert" in { - val mirror = ctx.run { liftQuery(people).foreach(p => query[Person].insertValue(p)) } - mirror.triple mustEqual ("INSERT INTO Person (id,name,age) VALUES (?, ?, ?)", List(List(1, "Joe", 123), List(2, "Jill", 456)), Static) + val mirror = ctx.run(liftQuery(people).foreach(p => query[Person].insertValue(p))) + mirror.triple mustEqual ("INSERT INTO Person (id,name,age) VALUES (?, ?, ?)", List( + List(1, "Joe", 123), + List(2, "Jill", 456) + ), Static) } case class Vip(vipId: Int, vipName: String, vipAge: Int, other: String) "insert - different-objects" in { val vips = List(Vip(1, "Joe", 123, "Something"), Vip(2, "Jill", 456, "Something")) - val mirror = ctx.run { liftQuery(vips).foreach(v => query[Person].insertValue(Person(v.vipId, v.vipName, v.vipAge))) } - mirror.triple mustEqual ("INSERT INTO Person (id,name,age) VALUES (?, ?, ?)", List(List(1, "Joe", 123), List(2, "Jill", 456)), Static) + val mirror = ctx.run { + liftQuery(vips).foreach(v => query[Person].insertValue(Person(v.vipId, v.vipName, v.vipAge))) + } + mirror.triple mustEqual ("INSERT INTO Person (id,name,age) VALUES (?, ?, ?)", List( + List(1, "Joe", 123), + List(2, "Jill", 456) + ), Static) } "update - liftQuery scalars" in { - val mirror = ctx.run { liftQuery(List(1, 2, 3)).foreach(i => query[Person].filter(p => p.id == i).update(_.age -> 111)) } - 
mirror.triple mustEqual ("UPDATE Person AS p SET age = 111 WHERE p.id = ?", List(List(1), List(2), List(3)), Static) + val mirror = ctx.run { + liftQuery(List(1, 2, 3)).foreach(i => query[Person].filter(p => p.id == i).update(_.age -> 111)) + } + mirror.triple mustEqual ("UPDATE Person AS p SET age = 111 WHERE p.id = ?", List( + List(1), + List(2), + List(3) + ), Static) } "update - liftQuery scalars - dynamic" in { - val updateDynamic = quote { - (i: Int) => query[Person].filter(p => p.id == i).update(_.age -> 111) + val updateDynamic = quote { (i: Int) => + query[Person].filter(p => p.id == i).update(_.age -> 111) } - val mirror = ctx.run { liftQuery(List(1, 2, 3)).foreach(i => updateDynamic(i)) } - mirror.triple mustEqual ("UPDATE Person AS p SET age = 111 WHERE p.id = ?", List(List(1), List(2), List(3)), Dynamic) + val mirror = ctx.run(liftQuery(List(1, 2, 3)).foreach(i => updateDynamic(i))) + mirror.triple mustEqual ("UPDATE Person AS p SET age = 111 WHERE p.id = ?", List( + List(1), + List(2), + List(3) + ), Dynamic) } "update - extra lift" in { // Future UseCase Note a filtered-insert does not make sense this way, should have a specific warning about it (i.e. that it's not supported because it's a filtered insert) // val mirror = ctx.run { query[Person].filter(p => p.id == 123).insertValue(people(0)) } // - val mirror = ctx.run { liftQuery(people).foreach(p => query[Person].filter(p => p.id == lift(36)).updateValue(p)) } - mirror.triple mustEqual ("UPDATE Person AS p SET id = ?, name = ?, age = ? WHERE p.id = ?", List(List(1, "Joe", 123, 36), List(2, "Jill", 456, 36)), Static) + val mirror = ctx.run { + liftQuery(people).foreach(p => query[Person].filter(p => p.id == lift(36)).updateValue(p)) + } + mirror.triple mustEqual ("UPDATE Person AS p SET id = ?, name = ?, age = ? WHERE p.id = ?", List( + List(1, "Joe", 123, 36), + List(2, "Jill", 456, 36) + ), Static) } "update - extra lift + scalars" in { - val mirror = ctx.run { liftQuery(List(1, 2, 3)).foreach(i => query[Person].filter(p => p.id == lift(36)).update(_.age -> i)) } - mirror.triple mustEqual ("UPDATE Person AS p SET age = ? WHERE p.id = ?", List(List(1, 36), List(2, 36), List(3, 36)), Static) + val mirror = ctx.run { + liftQuery(List(1, 2, 3)).foreach(i => query[Person].filter(p => p.id == lift(36)).update(_.age -> i)) + } + mirror.triple mustEqual ("UPDATE Person AS p SET age = ? WHERE p.id = ?", List( + List(1, 36), + List(2, 36), + List(3, 36) + ), Static) } "update - extra lift + scalars + multi-use" in { - val mirror = ctx.run { liftQuery(List(1, 2, 3)).foreach(i => query[Person].filter(p => p.id == i && p.age == lift(123)).update(_.age -> i)) } - mirror.triple mustEqual ("UPDATE Person AS p SET age = ? WHERE p.id = ? AND p.age = ?", List(List(1, 1, 123), List(2, 2, 123), List(3, 3, 123)), Static) + val mirror = ctx.run { + liftQuery(List(1, 2, 3)).foreach(i => + query[Person].filter(p => p.id == i && p.age == lift(123)).update(_.age -> i) + ) + } + mirror.triple mustEqual ("UPDATE Person AS p SET age = ? WHERE p.id = ? AND p.age = ?", List( + List(1, 1, 123), + List(2, 2, 123), + List(3, 3, 123) + ), Static) } "update - extra lift + scalars + liftQuery/setContains" in { - val mirror = ctx.run { liftQuery(List(1, 2, 3)).foreach(i => query[Person].filter(p => liftQuery(List(36, 49)).contains(p.id)).update(_.age -> i)) } - mirror.triple mustEqual ("UPDATE Person AS p SET age = ? 
WHERE p.id IN (?, ?)", List(List(1, 36, 49), List(2, 36, 49), List(3, 36, 49)), Static) + val mirror = ctx.run { + liftQuery(List(1, 2, 3)).foreach(i => + query[Person].filter(p => liftQuery(List(36, 49)).contains(p.id)).update(_.age -> i) + ) + } + mirror.triple mustEqual ("UPDATE Person AS p SET age = ? WHERE p.id IN (?, ?)", List( + List(1, 36, 49), + List(2, 36, 49), + List(3, 36, 49) + ), Static) } "update - extra lift + scalars + liftQuery/setContains + others" in { - val mirror = ctx.run { liftQuery(List(1, 2, 3)).foreach(i => query[Person].filter(p => liftQuery(List(36, 49)).contains(p.id) && p.id == lift(789)).update(_.age -> i)) } - mirror.triple mustEqual ("UPDATE Person AS p SET age = ? WHERE p.id IN (?, ?) AND p.id = ?", List(List(1, 36, 49, 789), List(2, 36, 49, 789), List(3, 36, 49, 789)), Static) + val mirror = ctx.run { + liftQuery(List(1, 2, 3)).foreach(i => + query[Person].filter(p => liftQuery(List(36, 49)).contains(p.id) && p.id == lift(789)).update(_.age -> i) + ) + } + mirror.triple mustEqual ("UPDATE Person AS p SET age = ? WHERE p.id IN (?, ?) AND p.id = ?", List( + List(1, 36, 49, 789), + List(2, 36, 49, 789), + List(3, 36, 49, 789) + ), Static) } "update - extra lift - dynamic" in { - val updateDynamic = quote { - (p: Person) => query[Person].filter(p => p.id == lift(36)).updateValue(p) + val updateDynamic = quote { (p: Person) => + query[Person].filter(p => p.id == lift(36)).updateValue(p) } - val mirror = ctx.run { liftQuery(people).foreach(p => updateDynamic(p)) } - mirror.triple mustEqual ("UPDATE Person AS p SET id = ?, name = ?, age = ? WHERE p.id = ?", List(List(1, "Joe", 123, 36), List(2, "Jill", 456, 36)), Dynamic) + val mirror = ctx.run(liftQuery(people).foreach(p => updateDynamic(p))) + mirror.triple mustEqual ("UPDATE Person AS p SET id = ?, name = ?, age = ? WHERE p.id = ?", List( + List(1, "Joe", 123, 36), + List(2, "Jill", 456, 36) + ), Dynamic) } "update - extra lift - dynamic + scalars" in { - val updateDynamic = quote { - (i: Int) => query[Person].filter(p => p.id == lift(36)).update(_.age -> i) + val updateDynamic = quote { (i: Int) => + query[Person].filter(p => p.id == lift(36)).update(_.age -> i) } - val mirror = ctx.run { liftQuery(List(1, 2, 3)).foreach(i => updateDynamic(i)) } - mirror.triple mustEqual ("UPDATE Person AS p SET age = ? WHERE p.id = ?", List(List(1, 36), List(2, 36), List(3, 36)), Dynamic) + val mirror = ctx.run(liftQuery(List(1, 2, 3)).foreach(i => updateDynamic(i))) + mirror.triple mustEqual ("UPDATE Person AS p SET age = ? WHERE p.id = ?", List( + List(1, 36), + List(2, 36), + List(3, 36) + ), Dynamic) } "update - extra lift - dynamic + scalars + multi-use" in { - val updateDynamic = quote { - (i: Int) => query[Person].filter(p => p.id == i && p.age == lift(123)).update(_.age -> i) + val updateDynamic = quote { (i: Int) => + query[Person].filter(p => p.id == i && p.age == lift(123)).update(_.age -> i) } - val mirror = ctx.run { liftQuery(List(1, 2, 3)).foreach(i => updateDynamic(i)) } - mirror.triple mustEqual ("UPDATE Person AS p SET age = ? WHERE p.id = ? AND p.age = ?", List(List(1, 1, 123), List(2, 2, 123), List(3, 3, 123)), Dynamic) + val mirror = ctx.run(liftQuery(List(1, 2, 3)).foreach(i => updateDynamic(i))) + mirror.triple mustEqual ("UPDATE Person AS p SET age = ? WHERE p.id = ? 
AND p.age = ?", List( + List(1, 1, 123), + List(2, 2, 123), + List(3, 3, 123) + ), Dynamic) } "update - extra lift - dynamic + scalars + liftQuery/setContains" in { - val updateDynamic = quote { - (i: Int) => query[Person].filter(p => liftQuery(List(36, 49)).contains(p.id)).update(_.age -> i) + val updateDynamic = quote { (i: Int) => + query[Person].filter(p => liftQuery(List(36, 49)).contains(p.id)).update(_.age -> i) } - val mirror = ctx.run { liftQuery(List(1, 2, 3)).foreach(i => updateDynamic(i)) } - mirror.triple mustEqual ("UPDATE Person AS p SET age = ? WHERE p.id IN (?, ?)", List(List(1, 36, 49), List(2, 36, 49), List(3, 36, 49)), Dynamic) + val mirror = ctx.run(liftQuery(List(1, 2, 3)).foreach(i => updateDynamic(i))) + mirror.triple mustEqual ("UPDATE Person AS p SET age = ? WHERE p.id IN (?, ?)", List( + List(1, 36, 49), + List(2, 36, 49), + List(3, 36, 49) + ), Dynamic) } "update - extra lift - dynamic + scalars + liftQuery/setContains + others" in { - val updateDynamic = quote { - (i: Int) => query[Person].filter(p => liftQuery(List(36, 49)).contains(p.id) && p.id == lift(789)).update(_.age -> i) + val updateDynamic = quote { (i: Int) => + query[Person].filter(p => liftQuery(List(36, 49)).contains(p.id) && p.id == lift(789)).update(_.age -> i) } - val mirror = ctx.run { liftQuery(List(1, 2, 3)).foreach(i => updateDynamic(i)) } - mirror.triple mustEqual ("UPDATE Person AS p SET age = ? WHERE p.id IN (?, ?) AND p.id = ?", List(List(1, 36, 49, 789), List(2, 36, 49, 789), List(3, 36, 49, 789)), Dynamic) + val mirror = ctx.run(liftQuery(List(1, 2, 3)).foreach(i => updateDynamic(i))) + mirror.triple mustEqual ("UPDATE Person AS p SET age = ? WHERE p.id IN (?, ?) AND p.id = ?", List( + List(1, 36, 49, 789), + List(2, 36, 49, 789), + List(3, 36, 49, 789) + ), Dynamic) } case class MyPerson(id: Int, name: String, birthYear: Int) "update via tuple" in { val birthYearUpdates = List((3431, 1983), (2976, 1972), (1511, 1991)) // // /// // // // val a = ctx.run { - liftQuery(birthYearUpdates).foreach { - case (id, year) => - query[MyPerson].filter(p => p.id == id).update(p => p.birthYear -> year) + liftQuery(birthYearUpdates).foreach { case (id, year) => + query[MyPerson].filter(p => p.id == id).update(p => p.birthYear -> year) } } - a.triple mustEqual ("UPDATE MyPerson AS p SET birthYear = ? WHERE p.id = ?", List(List(1983, 3431), List(1972, 2976), List(1991, 1511)), Static) + a.triple mustEqual ("UPDATE MyPerson AS p SET birthYear = ? WHERE p.id = ?", List( + List(1983, 3431), + List(1972, 2976), + List(1991, 1511) + ), Static) val b = ctx.run { liftQuery(birthYearUpdates).foreach((id, year) => query[MyPerson].filter(p => p.id == id).update(p => p.birthYear -> year) ) } - b.triple mustEqual ("UPDATE MyPerson AS p SET birthYear = ? WHERE p.id = ?", List(List(1983, 3431), List(1972, 2976), List(1991, 1511)), Static) + b.triple mustEqual ("UPDATE MyPerson AS p SET birthYear = ? 
WHERE p.id = ?", List( + List(1983, 3431), + List(1972, 2976), + List(1991, 1511) + ), Static) } "update via tuple - dynamic" in { - val updateDynamic = quote { - (id: Int, year: Int) => query[MyPerson].filter(p => p.id == id).update(p => p.birthYear -> year) + val updateDynamic = quote { (id: Int, year: Int) => + query[MyPerson].filter(p => p.id == id).update(p => p.birthYear -> year) } val birthYearUpdates = List((3431, 1983), (2976, 1972), (1511, 1991)) val a = ctx.run { - liftQuery(birthYearUpdates).foreach { - case (id, year) => updateDynamic(id, year) + liftQuery(birthYearUpdates).foreach { case (id, year) => + updateDynamic(id, year) } } - a.triple mustEqual ("UPDATE MyPerson AS p SET birthYear = ? WHERE p.id = ?", List(List(1983, 3431), List(1972, 2976), List(1991, 1511)), Dynamic) + a.triple mustEqual ("UPDATE MyPerson AS p SET birthYear = ? WHERE p.id = ?", List( + List(1983, 3431), + List(1972, 2976), + List(1991, 1511) + ), Dynamic) val b = ctx.run { liftQuery(birthYearUpdates).foreach((id, year) => updateDynamic(id, year)) } - b.triple mustEqual ("UPDATE MyPerson AS p SET birthYear = ? WHERE p.id = ?", List(List(1983, 3431), List(1972, 2976), List(1991, 1511)), Dynamic) + b.triple mustEqual ("UPDATE MyPerson AS p SET birthYear = ? WHERE p.id = ?", List( + List(1983, 3431), + List(1972, 2976), + List(1991, 1511) + ), Dynamic) // Does not work, variable tracking has an issue // val b = ctx.run { @@ -217,23 +318,36 @@ class BatchActionTest extends Spec with Inside with SuperContext[MirrorSqlDialec } "insert with function splice" in { - val mirror = ctx.run { liftQuery(people).foreach(p => insertPeople(p)) } - mirror.triple mustEqual ("INSERT INTO Person (id,name,age) VALUES (?, ?, ?)", List(List(1, "Joe", 123), List(2, "Jill", 456)), Static) + val mirror = ctx.run(liftQuery(people).foreach(p => insertPeople(p))) + mirror.triple mustEqual ("INSERT INTO Person (id,name,age) VALUES (?, ?, ?)", List( + List(1, "Joe", 123), + List(2, "Jill", 456) + ), Static) } "insert with dynamic function splice" in { // I.e. splicing the insertPeopleDynamic segment should make the whole query dynamic... and it should still work - val mirror = ctx.run { liftQuery(people).foreach(p => insertPeopleDynamic(p)) } - mirror.triple mustEqual ("INSERT INTO Person (id,name,age) VALUES (?, ?, ?)", List(List(1, "Joe", 123), List(2, "Jill", 456)), Dynamic) + val mirror = ctx.run(liftQuery(people).foreach(p => insertPeopleDynamic(p))) + mirror.triple mustEqual ("INSERT INTO Person (id,name,age) VALUES (?, ?, ?)", List( + List(1, "Joe", 123), + List(2, "Jill", 456) + ), Dynamic) } "update" in { - val mirror = ctx.run { liftQuery(people).foreach(p => query[Person].filter(pf => pf.id == p.id).update(_.name -> p.name, _.age -> p.age)) } - mirror.triple mustEqual ("UPDATE Person AS pf SET name = ?, age = ? WHERE pf.id = ?", List(List("Joe", 123, 1), List("Jill", 456, 2)), Static) + val mirror = ctx.run { + liftQuery(people).foreach(p => + query[Person].filter(pf => pf.id == p.id).update(_.name -> p.name, _.age -> p.age) + ) + } + mirror.triple mustEqual ("UPDATE Person AS pf SET name = ?, age = ? 
WHERE pf.id = ?", List( + List("Joe", 123, 1), + List("Jill", 456, 2) + ), Static) } "update - object with meta" in { inline given UpdateMeta[Person] = updateMeta(_.id) - val mirror = ctx.run { liftQuery(people).foreach(p => query[Person].filter(pf => pf.id == p.id).updateValue(p)) } + val mirror = ctx.run(liftQuery(people).foreach(p => query[Person].filter(pf => pf.id == p.id).updateValue(p))) mirror.triple mustEqual ( "UPDATE Person AS pf SET name = ?, age = ? WHERE pf.id = ?", List(List("Joe", 123, 1), List("Jill", 456, 2)), @@ -242,7 +356,7 @@ class BatchActionTest extends Spec with Inside with SuperContext[MirrorSqlDialec } "delete" in { - val mirror = ctx.run { liftQuery(people).foreach(p => query[Person].filter(pf => pf.id == p.id).delete) } + val mirror = ctx.run(liftQuery(people).foreach(p => query[Person].filter(pf => pf.id == p.id).delete)) mirror.triple mustEqual ("DELETE FROM Person AS pf WHERE pf.id = ?", List(List(1), List(2)), Static) } } diff --git a/quill-sql/src/test/scala/io/getquill/FlicersSpec.scala b/quill-sql/src/test/scala/io/getquill/FlicersSpec.scala index 9e565737c..413786ac8 100644 --- a/quill-sql/src/test/scala/io/getquill/FlicersSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/FlicersSpec.scala @@ -33,7 +33,7 @@ class FlicersSpec extends Spec { inline def q = quote { query[PersonFlat].filterByKeys(keys) } - val r = ctx.run(q) + val r = ctx.run(q) val (qry, lifts, executionType) = r.triple qry mustEqual "SELECT p.firstName, p.lastName, p.age FROM PersonFlat p WHERE (p.firstName = ? OR ?) AND (p.lastName = ? OR ?) AND (p.age = ? OR ?)" lifts mustEqual List(Some("Joe"), false, None, true, Some(123), false) @@ -46,26 +46,46 @@ class FlicersSpec extends Spec { query[ManyTypes].filterByKeys(keys) } - case class ManyTypes(s: String, so: Option[String], i: Int, io: Option[Int], ld: LocalDate, ldo: Option[LocalDate]) + case class ManyTypes( + s: String, + so: Option[String], + i: Int, + io: Option[Int], + ld: LocalDate, + ldo: Option[LocalDate] + ) "Splice on an object multiple encoding types" in { - val keys = Map[String, Any]("s" -> "Joe", "so" -> "Joe", "i" -> 123, "io" -> 123, "ld" -> now, "ldo" -> now) - val r = ctx.run(q(keys)) + val keys = Map[String, Any]("s" -> "Joe", "so" -> "Joe", "i" -> 123, "io" -> 123, "ld" -> now, "ldo" -> now) + val r = ctx.run(q(keys)) val (qry, lifts, executionType) = r.triple qry mustEqual "SELECT p.s, p.so, p.i, p.io, p.ld, p.ldo FROM ManyTypes p WHERE (p.s = ? OR ?) AND (p.so = ? OR ?) AND (p.i = ? OR ?) AND (p.io = ? OR ?) AND (p.ld = ? OR ?) AND (p.ldo = ? OR ?)" - lifts mustEqual List(Some("Joe"), false, Some("Joe"), false, Some(123), false, Some(123), false, Some(now), false, Some(now), false) + lifts mustEqual List( + Some("Joe"), + false, + Some("Joe"), + false, + Some(123), + false, + Some(123), + false, + Some(now), + false, + Some(now), + false + ) executionType mustEqual ExecutionType.Static } "Splice on an object multiple encoding types - missing Nones" in { - val keys = Map[String, Any]("s" -> "Joe", "i" -> 123, "ld" -> now) - val r = ctx.run(q(keys)) + val keys = Map[String, Any]("s" -> "Joe", "i" -> 123, "ld" -> now) + val r = ctx.run(q(keys)) val (qry, lifts, executionType) = r.triple qry mustEqual "SELECT p.s, p.so, p.i, p.io, p.ld, p.ldo FROM ManyTypes p WHERE (p.s = ? OR ?) AND (p.so = ? OR ?) AND (p.i = ? OR ?) AND (p.io = ? OR ?) AND (p.ld = ? OR ?) AND (p.ldo = ? 
OR ?)" lifts mustEqual List(Some("Joe"), false, None, true, Some(123), false, None, true, Some(now), false, None, true) executionType mustEqual ExecutionType.Static } "Splice on an object multiple encoding types - missing All" in { - val keys = Map[String, Any]("s" -> "Joe", "i" -> 123, "ld" -> now) - val r = ctx.run(q(keys)) + val keys = Map[String, Any]("s" -> "Joe", "i" -> 123, "ld" -> now) + val r = ctx.run(q(keys)) val (qry, lifts, executionType) = r.triple qry mustEqual "SELECT p.s, p.so, p.i, p.io, p.ld, p.ldo FROM ManyTypes p WHERE (p.s = ? OR ?) AND (p.so = ? OR ?) AND (p.i = ? OR ?) AND (p.io = ? OR ?) AND (p.ld = ? OR ?) AND (p.ldo = ? OR ?)" lifts mustEqual List(Some("Joe"), false, None, true, Some(123), false, None, true, Some(now), false, None, true) @@ -78,7 +98,7 @@ class FlicersSpec extends Spec { inline def q = quote { query[PersonFlatOpt].filterByKeys(keys) } - val r = ctx.run(q) + val r = ctx.run(q) val (qry, lifts, executionType) = r.triple qry mustEqual "SELECT p.firstName, p.lastName, p.age FROM PersonFlatOpt p WHERE (p.firstName = ? OR ?) AND (p.lastName = ? OR ?) AND (p.age = ? OR ?)" lifts mustEqual List(Some("Joe"), false, None, true, Some(123), false) @@ -90,7 +110,7 @@ class FlicersSpec extends Spec { inline def q = quote { query[PersonNest].filterByKeys(keys) } - val r = ctx.run(q) + val r = ctx.run(q) val (qry, lifts, executionType) = r.triple qry mustEqual "SELECT p.first, p.last, p.age FROM PersonNest p WHERE (p.first = ? OR ?) AND (p.last = ? OR ?) AND (p.age = ? OR ?)" lifts mustEqual List(Some("Joe"), false, None, true, Some(123), false) @@ -102,7 +122,7 @@ class FlicersSpec extends Spec { inline def q = quote { query[PersonNestOpt].filterByKeys(keys) } - val r = ctx.run(q) + val r = ctx.run(q) val (qry, lifts, executionType) = r.triple qry mustEqual "SELECT p.first, p.last, p.age FROM PersonNestOpt p WHERE (p.first = ? OR ?) AND (p.last = ? OR ?) AND (p.age = ? OR ?)" lifts mustEqual List(Some("Joe"), false, None, true, Some(123), false) @@ -114,7 +134,7 @@ class FlicersSpec extends Spec { inline def q = quote { query[PersonNestOptField].filterByKeys(keys) } - val r = ctx.run(q) // + val r = ctx.run(q) // val (qry, lifts, executionType) = r.triple qry mustEqual "SELECT p.first, p.last, p.age FROM PersonNestOptField p WHERE (p.first = ? OR ?) AND (p.last = ? OR ?) AND (p.age = ? 
OR ?)" lifts mustEqual List(Some("Joe"), false, None, true, Some(123), false) diff --git a/quill-sql/src/test/scala/io/getquill/GenericDecoderCoproductTest.scala b/quill-sql/src/test/scala/io/getquill/GenericDecoderCoproductTest.scala index 14586e6aa..5ea710343 100644 --- a/quill-sql/src/test/scala/io/getquill/GenericDecoderCoproductTest.scala +++ b/quill-sql/src/test/scala/io/getquill/GenericDecoderCoproductTest.scala @@ -26,7 +26,7 @@ object StaticSealedTraitExample { sealed trait Shape object Shape { case class Square(width: Int, height: Int) extends Shape - case class Circle(radius: Int) extends Shape + case class Circle(radius: Int) extends Shape } } @@ -54,7 +54,7 @@ class GenericDecoderCoproductTest extends Spec { } "test coproduct type" in { - val s = MySession + val s = MySession val r1 = MyResult("tpe" -> "square", "radius" -> 890, "width" -> 123, "height" -> 456) autoDecoder[Shape](0, r1, s) mustEqual Shape.Square(123, 456) val r2 = MyResult("tpe" -> "circle", "radius" -> 890, "width" -> 123, "height" -> 456) diff --git a/quill-sql/src/test/scala/io/getquill/GenericDecoderCoproductTestAdditional.scala b/quill-sql/src/test/scala/io/getquill/GenericDecoderCoproductTestAdditional.scala index 39c89de63..daa555312 100644 --- a/quill-sql/src/test/scala/io/getquill/GenericDecoderCoproductTestAdditional.scala +++ b/quill-sql/src/test/scala/io/getquill/GenericDecoderCoproductTestAdditional.scala @@ -25,11 +25,13 @@ import io.getquill.generic.DecodingType import io.getquill.generic.GenericNullChecker object GenericDecoderCoproductTestAdditional { - implicit inline def autoDecoder[T]: GenericDecoder[MyResult, MySession, T, DecodingType.Generic] = ${ GenericDecoder.summon[T, MyResult, MySession] } + implicit inline def autoDecoder[T]: GenericDecoder[MyResult, MySession, T, DecodingType.Generic] = ${ + GenericDecoder.summon[T, MyResult, MySession] + } sealed trait MySession { type BaseNullChecker = GenericNullChecker[MyResult, MySession] - type NullChecker = MirrorNullChecker + type NullChecker = MirrorNullChecker class MirrorNullChecker extends BaseNullChecker { override def apply(index: Int, row: MyResult): Boolean = row.nullAt(index) } @@ -38,10 +40,10 @@ object GenericDecoderCoproductTestAdditional { object MySession extends MySession case class MyResult(values: (String, Any)*) { - lazy val list = LinkedHashMap[String, Any](values.toList: _*) - def nullAt(i: Int) = list.values.toList(i) == null - def get(i: Int): String = list.values.toList(i).toString - def get(key: String): String = list.apply(key).toString + lazy val list = LinkedHashMap[String, Any](values.toList: _*) + def nullAt(i: Int) = list.values.toList(i) == null + def get(i: Int): String = list.values.toList(i).toString + def get(key: String): String = list.apply(key).toString def resolve(key: String): Int = list.keysIterator.toList.indexOf(key) } @@ -55,8 +57,7 @@ object GenericDecoderCoproductTestAdditional { // TODO automatically provide this in 'context' given res: GenericColumnResolver[MyResult] with { - def apply(resultRow: MyResult, columnName: String): Int = { + def apply(resultRow: MyResult, columnName: String): Int = resultRow.resolve(columnName) - } } } diff --git a/quill-sql/src/test/scala/io/getquill/GenericDecoderTest.scala b/quill-sql/src/test/scala/io/getquill/GenericDecoderTest.scala index e95bf3299..cd8d535a4 100644 --- a/quill-sql/src/test/scala/io/getquill/GenericDecoderTest.scala +++ b/quill-sql/src/test/scala/io/getquill/GenericDecoderTest.scala @@ -29,7 +29,8 @@ import 
io.getquill.context.mirror.MirrorSession class GenericDecoderTest extends Spec { import StaticEnumExample._ - val ctx = new MirrorContext[MirrorSqlDialect, Literal](MirrorSqlDialect, Literal) with MirrorColumnResolving[MirrorSqlDialect, Literal] + val ctx = new MirrorContext[MirrorSqlDialect, Literal](MirrorSqlDialect, Literal) + with MirrorColumnResolving[MirrorSqlDialect, Literal] import ctx.{given, _} case class Person(name: String, age: Int) @@ -44,9 +45,9 @@ class GenericDecoderTest extends Spec { } "test product type" in { - val s = MirrorSession.default - inline def q = quote { query[Shape].filter(s => s.id == 18) } - val result = ctx.run(q) + val s = MirrorSession.default + inline def q = quote(query[Shape].filter(s => s.id == 18)) + val result = ctx.run(q) val squareRow = Row("type" -> "square", "id" -> 18, "radius" -> 890, "width" -> 123, "height" -> 456) result.extractor(squareRow, s) mustEqual Shape.Square(18, 123, 456) @@ -59,16 +60,16 @@ class GenericDecoderTest extends Spec { val s = MirrorSession.default "test tuple type" in { - inline def q = quote { query[Person].map(p => (p.name, p.age)) } - val result = ctx.run(q) + inline def q = quote(query[Person].map(p => (p.name, p.age))) + val result = ctx.run(q) val tupleRow = Row("_1" -> "Joe", "_2" -> 123) result.extractor(tupleRow, s) mustEqual ("Joe", 123) } "test case class type" in { - inline def q = quote { query[Person] } - val result = ctx.run(q) + inline def q = quote(query[Person]) + val result = ctx.run(q) val tupleRow = Row("name" -> "Joe", "age" -> 123) result.extractor(tupleRow, s) mustEqual Person("Joe", 123) @@ -78,6 +79,6 @@ class GenericDecoderTest extends Spec { object StaticEnumExample { enum Shape(val id: Int) { case Square(override val id: Int, width: Int, height: Int) extends Shape(id) - case Circle(override val id: Int, radius: Int) extends Shape(id) + case Circle(override val id: Int, radius: Int) extends Shape(id) } } diff --git a/quill-sql/src/test/scala/io/getquill/InsertLiftedSpec.scala b/quill-sql/src/test/scala/io/getquill/InsertLiftedSpec.scala index 88304626b..ce0e9e749 100644 --- a/quill-sql/src/test/scala/io/getquill/InsertLiftedSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/InsertLiftedSpec.scala @@ -13,7 +13,7 @@ class InsertLiftedSpec extends Spec { case class Name(first: First, last: String) case class Person(name: Name, age: Int) - inline def q = quote { query[Person].insertValue(lift(Person(Name(First("Joe"), "Bloggs"), 123))) } + inline def q = quote(query[Person].insertValue(lift(Person(Name(First("Joe"), "Bloggs"), 123)))) ctx.run(q).triple mustEqual ( "INSERT INTO Person (value,last,age) VALUES (?, ?, ?)", List("Joe", "Bloggs", 123), @@ -26,7 +26,7 @@ class InsertLiftedSpec extends Spec { case class Name(first: First, last: String) case class Person(name: Option[Name], age: Int) - inline def q = quote { query[Person].insertValue(lift(Person(Some(Name(First(Some("Joe")), "Bloggs")), 123))) } + inline def q = quote(query[Person].insertValue(lift(Person(Some(Name(First(Some("Joe")), "Bloggs")), 123)))) ctx.run(q).triple mustEqual ( "INSERT INTO Person (value,last,age) VALUES (?, ?, ?)", List(Some("Joe"), Some("Bloggs"), 123), @@ -39,7 +39,7 @@ class InsertLiftedSpec extends Spec { case class Name(first: First, last: String) case class Person(name: Option[Name], age: Int) - inline def q = quote { query[Person].insertValue(lift(Person(Some(Name(First("Joe"), "Bloggs")), 123))) } + inline def q = quote(query[Person].insertValue(lift(Person(Some(Name(First("Joe"), "Bloggs")), 123)))) 
ctx.run(q).triple mustEqual ( "INSERT INTO Person (value,last,age) VALUES (?, ?, ?)", List(Some("Joe"), Some("Bloggs"), 123), @@ -53,7 +53,9 @@ class InsertLiftedSpec extends Spec { case class Name(first: First, last: String) case class Person(name: Option[Name], age: Int) - inline def q = quote { query[Person].insertValue(lift(Person(Some(Name(First(Value(Some("Joe"))), "Bloggs")), 123))) } + inline def q = quote { + query[Person].insertValue(lift(Person(Some(Name(First(Value(Some("Joe"))), "Bloggs")), 123))) + } ctx.run(q).triple mustEqual ( "INSERT INTO Person (value,last,age) VALUES (?, ?, ?)", List(Some("Joe"), Some("Bloggs"), 123), diff --git a/quill-sql/src/test/scala/io/getquill/MappedEncodingSpec.scala b/quill-sql/src/test/scala/io/getquill/MappedEncodingSpec.scala index 953b4a77a..63e50dbc2 100644 --- a/quill-sql/src/test/scala/io/getquill/MappedEncodingSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/MappedEncodingSpec.scala @@ -13,7 +13,7 @@ class MirrorEncodingSpec extends Spec { implicit val encodeName: MappedEncoding[Name, String] = MappedEncoding[Name, String](_.value) implicit val decodeName: MappedEncoding[String, Name] = MappedEncoding[String, Name](str => Name(str)) - val name = Name("Joe") + val name = Name("Joe") val mirror = ctx.run(query[Person].filter(p => p.name == lift(name))) mirror.triple mustEqual ( ( @@ -30,7 +30,7 @@ class MirrorEncodingSpec extends Spec { implicit val encodeName: MappedEncoding[Name, String] = MappedEncoding[Name, String](_.value) implicit val decodeName: MappedEncoding[String, Name] = MappedEncoding[String, Name](str => Name(str)) - val name = Name("Joe") + val name = Name("Joe") val mirror = ctx.run(query[Person].filter(p => p.name == lift(name))) mirror.triple mustEqual ( ( @@ -67,7 +67,7 @@ class MirrorEncodingSpec extends Spec { case class Name(value: String) case class Person(name: Name, age: Int) - val name = Name("Joe") + val name = Name("Joe") val mirror = ctx.run(query[Person].filter(p => p.name == lift(name))) mirror.triple mustEqual ( ( diff --git a/quill-sql/src/test/scala/io/getquill/OptionalProductEncodingSpec.scala b/quill-sql/src/test/scala/io/getquill/OptionalProductEncodingSpec.scala index 960ba3fd9..397e506bd 100644 --- a/quill-sql/src/test/scala/io/getquill/OptionalProductEncodingSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/OptionalProductEncodingSpec.scala @@ -44,7 +44,7 @@ class OptionalProductEncodingSpec extends Spec { } "optional product with multiple nested optional embeds" in { - case class InnerName(title: Int, last: String) extends Embedded + case class InnerName(title: Int, last: String) extends Embedded case class Name(first: String, last: Option[InnerName]) extends Embedded case class Address(owner: Int, street: String) case class Person(id: Int, name: Option[Name]) diff --git a/quill-sql/src/test/scala/io/getquill/ParticularizationSpec.scala b/quill-sql/src/test/scala/io/getquill/ParticularizationSpec.scala index c934fee20..b65ef34aa 100644 --- a/quill-sql/src/test/scala/io/getquill/ParticularizationSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/ParticularizationSpec.scala @@ -34,15 +34,29 @@ class ParticularizationSpec extends Spec { ) } "works with list lift mixed" in { - inline def q = quote { query[Ent].filter(e => e.bar == lift("h") && liftQuery(List("a", "b")).contains(e.foo) && e.bar == lift("t")) } + inline def q = quote { + query[Ent].filter(e => e.bar == lift("h") && liftQuery(List("a", "b")).contains(e.foo) && e.bar == lift("t")) + } ctx.run(q).triple mustBe ( - "SELECT 
e.foo, e.bar FROM Ent e WHERE e.bar = ? AND e.foo IN (?, ?) AND e.bar = ?", List("h", "a", "b", "t"), Static + "SELECT e.foo, e.bar FROM Ent e WHERE e.bar = ? AND e.foo IN (?, ?) AND e.bar = ?", List( + "h", + "a", + "b", + "t" + ), Static ) } "works with list lift mixed - dynamic" in { - val q = quote { query[Ent].filter(e => e.bar == lift("h") && liftQuery(List("a", "b")).contains(e.foo) && e.bar == lift("t")) } + val q = quote { + query[Ent].filter(e => e.bar == lift("h") && liftQuery(List("a", "b")).contains(e.foo) && e.bar == lift("t")) + } ctx.run(q).triple mustBe ( - "SELECT e.foo, e.bar FROM Ent e WHERE e.bar = ? AND e.foo IN (?, ?) AND e.bar = ?", List("h", "a", "b", "t"), Dynamic + "SELECT e.foo, e.bar FROM Ent e WHERE e.bar = ? AND e.foo IN (?, ?) AND e.bar = ?", List( + "h", + "a", + "b", + "t" + ), Dynamic ) } } @@ -85,27 +99,41 @@ class ParticularizationSpec extends Spec { ) } "works with list lift" in { - inline def q = quote { query[Ent].filter(e => liftQuery(List("a", "b")).contains(e.foo)) } + inline def q = quote(query[Ent].filter(e => liftQuery(List("a", "b")).contains(e.foo))) ctx.run(q).triple mustBe ( "SELECT e.foo, e.bar FROM Ent e WHERE e.foo IN ($1, $2)", List("a", "b"), Static ) } "works with list lift - dynamic" in { - val q = quote { query[Ent].filter(e => liftQuery(List("a", "b")).contains(e.foo)) } + val q = quote(query[Ent].filter(e => liftQuery(List("a", "b")).contains(e.foo))) ctx.run(q).triple mustBe ( "SELECT e.foo, e.bar FROM Ent e WHERE e.foo IN ($1, $2)", List("a", "b"), Dynamic ) } "works with list lift mixed" in { - inline def q = quote { query[Ent].filter(e => e.bar == lift("h") && liftQuery(List("a", "b")).contains(e.foo) && e.bar == lift("t")) } + inline def q = quote { + query[Ent].filter(e => e.bar == lift("h") && liftQuery(List("a", "b")).contains(e.foo) && e.bar == lift("t")) + } ctx.run(q).triple mustBe ( - "SELECT e.foo, e.bar FROM Ent e WHERE e.bar = $1 AND e.foo IN ($2, $3) AND e.bar = $4", List("h", "a", "b", "t"), Static + "SELECT e.foo, e.bar FROM Ent e WHERE e.bar = $1 AND e.foo IN ($2, $3) AND e.bar = $4", List( + "h", + "a", + "b", + "t" + ), Static ) } "works with list lift mixed - dynamic" in { - val q = quote { query[Ent].filter(e => e.bar == lift("h") && liftQuery(List("a", "b")).contains(e.foo) && e.bar == lift("t")) } + val q = quote { + query[Ent].filter(e => e.bar == lift("h") && liftQuery(List("a", "b")).contains(e.foo) && e.bar == lift("t")) + } ctx.run(q).triple mustBe ( - "SELECT e.foo, e.bar FROM Ent e WHERE e.bar = $1 AND e.foo IN ($2, $3) AND e.bar = $4", List("h", "a", "b", "t"), Dynamic + "SELECT e.foo, e.bar FROM Ent e WHERE e.bar = $1 AND e.foo IN ($2, $3) AND e.bar = $4", List( + "h", + "a", + "b", + "t" + ), Dynamic ) } } diff --git a/quill-sql/src/test/scala/io/getquill/QueryMetaTest.scala b/quill-sql/src/test/scala/io/getquill/QueryMetaTest.scala index f71d5fd72..d9e334abf 100644 --- a/quill-sql/src/test/scala/io/getquill/QueryMetaTest.scala +++ b/quill-sql/src/test/scala/io/getquill/QueryMetaTest.scala @@ -21,14 +21,14 @@ class QueryMetaTest extends Spec with Inside { "static meta - new style" - { inline given QueryMeta[PersonName, String] = queryMeta( - quote { - (q: Query[PersonName]) => q.map(p => p.name) + quote { (q: Query[PersonName]) => + q.map(p => p.name) } )((name: String) => PersonName(name)) "static query" in { - inline def people = quote { query[PersonName] } - val result = ctx.run(people) + inline def people = quote(query[PersonName]) + val result = ctx.run(people) result.string mustEqual 
"""querySchema("PersonName").map(p => p.name)""" result.info.executionType mustEqual ExecutionType.Static } @@ -40,20 +40,20 @@ class QueryMetaTest extends Spec with Inside { inline given QueryMeta[Vip, Person] = queryMeta( - quote { - (q: Query[Vip]) => q.map(v => Person(v.name, v.age, "VeryImportant")) + quote { (q: Query[Vip]) => + q.map(v => Person(v.name, v.age, "VeryImportant")) } )((p: Person) => Vip(p.name, p.age)) "static query" in { - inline def people = quote { query[Vip] } - val result = ctx.run(people) + inline def people = quote(query[Vip]) + val result = ctx.run(people) result.string mustEqual """querySchema("Vip").map(v => Person(name: v.name, age: v.age, importance: "VeryImportant"))""" result.info.executionType mustEqual ExecutionType.Static } "dynamic query" in { - val people = quote { query[Vip] } + val people = quote(query[Vip]) val result = ctx.run(people) result.string mustEqual """querySchema("Vip").map(v => Person(name: v.name, age: v.age, importance: "VeryImportant"))""" result.info.executionType mustEqual ExecutionType.Dynamic @@ -61,23 +61,22 @@ class QueryMetaTest extends Spec with Inside { } "static meta" - { - implicit inline def qm: QueryMeta[PersonName, String] = { // hellooo + implicit inline def qm: QueryMeta[PersonName, String] = // hellooo queryMeta[PersonName, String]( - quote { - (q: Query[PersonName]) => q.map(p => p.name) + quote { (q: Query[PersonName]) => + q.map(p => p.name) } )((name: String) => PersonName(name)) - } "static query should yield static result" in { - inline def people = quote { query[PersonName] } - val result = ctx.run(people) + inline def people = quote(query[PersonName]) + val result = ctx.run(people) result.string mustEqual """querySchema("PersonName").map(p => p.name)""" result.info.executionType mustEqual ExecutionType.Static } "dynamic query shuold yield dynamic requst" in { - val people = quote { query[PersonName] } + val people = quote(query[PersonName]) val result = ctx.run(people) // println("Result: " + result.string) // println("=== Result: " + io.getquill.util.Messages.qprint(result.info.ast)) @@ -87,23 +86,22 @@ class QueryMetaTest extends Spec with Inside { } "dynamic meta" - { - implicit val qm: QueryMeta[PersonName, String] = { + implicit val qm: QueryMeta[PersonName, String] = queryMeta[PersonName, String]( - quote { - (q: Query[PersonName]) => q.map(p => p.name) + quote { (q: Query[PersonName]) => + q.map(p => p.name) } )((name: String) => PersonName(name)) - } "static query" in { - inline def people: Quoted[Query[PersonName]] = quote { query[PersonName] } - val result = ctx.run[PersonName](people) + inline def people: Quoted[Query[PersonName]] = quote(query[PersonName]) + val result = ctx.run[PersonName](people) result.string mustEqual """querySchema("PersonName").map(p => p.name)""" result.info.executionType mustEqual ExecutionType.Dynamic } "dynamic query" in { - val people = quote { query[PersonName] } + val people = quote(query[PersonName]) val result = ctx.run(people) result.string mustEqual """querySchema("PersonName").map(p => p.name)""" result.info.executionType mustEqual ExecutionType.Dynamic diff --git a/quill-sql/src/test/scala/io/getquill/QuerySchemaTest.scala b/quill-sql/src/test/scala/io/getquill/QuerySchemaTest.scala index c5e6a8bf5..2e25e8221 100644 --- a/quill-sql/src/test/scala/io/getquill/QuerySchemaTest.scala +++ b/quill-sql/src/test/scala/io/getquill/QuerySchemaTest.scala @@ -35,48 +35,50 @@ class QuerySchemaTest extends Spec with Inside { // hello "schema meta" - { "custom" in { implicit 
inline def meta: SchemaMeta[TestEntity] = schemaMeta("test_entity", _.i -> "ii") - inline def q = quote(query[TestEntity]) + inline def q = quote(query[TestEntity]) q.ast.toString mustEqual """`querySchema`("test_entity", _.i -> "ii")""" ctx.run(q).strAndExec mustEqual ("""`querySchema`("test_entity", _.i -> "ii")""", ExecutionType.Static) } "custom-idiomatic" in { inline given sm: SchemaMeta[TestEntity] = schemaMeta("test_entity", _.i -> "ii") - inline def q = quote(query[TestEntity]) + inline def q = quote(query[TestEntity]) q.ast.toString mustEqual """`querySchema`("test_entity", _.i -> "ii")""" ctx.run(q).strAndExec mustEqual ("""`querySchema`("test_entity", _.i -> "ii")""", ExecutionType.Static) } // using dynamic SchemaMeta must be possible as well "custom dynamic-meta/static-query" in { implicit val meta: SchemaMeta[TestEntity] = schemaMeta[TestEntity]("test_entity", _.i -> "ii") - inline def q = quote(query[TestEntity]) + inline def q = quote(query[TestEntity]) ctx.run(q).strAndExec mustEqual ("""`querySchema`("test_entity", _.i -> "ii")""", ExecutionType.Dynamic) } "custom dynamic-meta/static-query - idiomatic" in { implicit val meta: SchemaMeta[TestEntity] = schemaMeta[TestEntity]("test_entity", _.i -> "ii") - inline def q = quote(query[TestEntity]) + inline def q = quote(query[TestEntity]) ctx.run(q).strAndExec mustEqual ("""`querySchema`("test_entity", _.i -> "ii")""", ExecutionType.Dynamic) } "custom dynamic meta with dynamic query" in { implicit val meta: SchemaMeta[TestEntity] = schemaMeta[TestEntity]("test_entity", _.i -> "ii") - def q = quote(query[TestEntity]) + def q = quote(query[TestEntity]) ctx.run(q).strAndExec mustEqual ("""`querySchema`("test_entity", _.i -> "ii")""", ExecutionType.Dynamic) } "custom dynamic and composition" in { implicit val meta: SchemaMeta[TestEntity] = schemaMeta[TestEntity]("test_entity", _.i -> "ii") - inline def q = quote(query[TestEntity].filter(e => e.i == 1)) - ctx.run(q).strAndExec mustEqual ("""`querySchema`("test_entity", _.i -> "ii").filter(e => e.i == 1)""", ExecutionType.Dynamic) + inline def q = quote(query[TestEntity].filter(e => e.i == 1)) + ctx + .run(q) + .strAndExec mustEqual ("""`querySchema`("test_entity", _.i -> "ii").filter(e => e.i == 1)""", ExecutionType.Dynamic) } "custom with embedded" in { case class Entity(emb: EmbValue) implicit inline def meta: SchemaMeta[Entity] = schemaMeta[Entity]("test_entity", _.emb.i -> "ii") - inline def q = quote(query[Entity]) + inline def q = quote(query[Entity]) q.ast.toString mustEqual """`querySchema`("test_entity", _.emb.i -> "ii")""" ctx.run(q).strAndExec mustEqual ("""`querySchema`("test_entity", _.emb.i -> "ii")""", ExecutionType.Static) } "custom with optional embedded" in { case class Entity(emb: Option[EmbValue]) implicit inline def meta: SchemaMeta[Entity] = schemaMeta[Entity]("test_entity", _.emb.map(_.i) -> "ii") - inline def q = quote(query[Entity]) + inline def q = quote(query[Entity]) q.ast.toString mustEqual """`querySchema`("test_entity", _.emb.i -> "ii")""" // TODO What's the AST for this? Why are parens around v making (v)? 
ctx.run(q).strAndExec mustEqual ("""`querySchema`("test_entity", _.emb.i -> "ii")""", ExecutionType.Static) diff --git a/quill-sql/src/test/scala/io/getquill/QuotationTest.scala b/quill-sql/src/test/scala/io/getquill/QuotationTest.scala index 695d15bce..56581e162 100644 --- a/quill-sql/src/test/scala/io/getquill/QuotationTest.scala +++ b/quill-sql/src/test/scala/io/getquill/QuotationTest.scala @@ -35,65 +35,77 @@ class QuotationTest extends Spec with Inside { case _ => None } } - val IdentP = Ident("p", quatOf[Person]) + val IdentP = Ident("p", quatOf[Person]) val PersonQuat = quatOf[Person].probit "compiletime quotation has correct ast for" - { "trivial whole-record select" in { - inline def q = quote { query[Person] } + inline def q = quote(query[Person]) q.ast mustEqual Entity("Person", List(), quatOf[Person].probit) } "single field mapping" in { - inline def q = quote { query[Person].map(p => p.name) } + inline def q = quote(query[Person].map(p => p.name)) q.ast mustEqual Map(Entity("Person", List(), quatOf[Person].probit), IdentP, Property(IdentP, "name")) } "anonymous single field mapping" in { - inline def q = quote { query[Person].map(_.name) } - q.ast mustEqual Map(Entity("Person", List(), quatOf[Person].probit), Ident("x1", quatOf[Person]), Property(Ident("x1", quatOf[Person]), "name")) + inline def q = quote(query[Person].map(_.name)) + q.ast mustEqual Map( + Entity("Person", List(), quatOf[Person].probit), + Ident("x1", quatOf[Person]), + Property(Ident("x1", quatOf[Person]), "name") + ) } "splice into another quotation without quote" in { - inline def q = query[Person] - inline def qq = quote { q.map(p => p.name) } + inline def q = query[Person] + inline def qq = quote(q.map(p => p.name)) qq.ast mustEqual Map(Entity("Person", List(), quatOf[Person].probit), IdentP, Property(IdentP, "name")) } "unquoted splice into another quotation" in { - inline def q = quote { query[Person] } - inline def qq = quote { q.map(p => p.name) } + inline def q = quote(query[Person]) + inline def qq = quote(q.map(p => p.name)) qq.ast mustEqual Map(Entity("Person", List(), quatOf[Person].probit), IdentP, Property(IdentP, "name")) } "double unquoted splice into another quotation" in { - inline def q = quote { query[Person] } - inline def qq = quote { q.map(p => p.name) } - inline def qqq = quote { qq.map(s => s) } + inline def q = quote(query[Person]) + inline def qq = quote(q.map(p => p.name)) + inline def qqq = quote(qq.map(s => s)) qq.ast mustEqual Map(Entity("Person", List(), quatOf[Person].probit), IdentP, Property(IdentP, "name")) - qqq.ast mustEqual Map(Map(Entity("Person", List(), quatOf[Person].probit), IdentP, Property(IdentP, "name")), Ident("s"), Ident("s")) + qqq.ast mustEqual Map( + Map(Entity("Person", List(), quatOf[Person].probit), IdentP, Property(IdentP, "name")), + Ident("s"), + Ident("s") + ) } "double splice into another quotation, middle not quoted" in { - inline def q = quote { query[Person] } - inline def qq = q.map(p => p.name) - inline def qqq = quote { qq.map(s => s) } + inline def q = quote(query[Person]) + inline def qq = q.map(p => p.name) + inline def qqq = quote(qq.map(s => s)) qq.ast mustEqual Map(Entity("Person", List(), quatOf[Person].probit), IdentP, Property(IdentP, "name")) - qqq.ast mustEqual Map(Map(Entity("Person", List(), quatOf[Person].probit), IdentP, Property(IdentP, "name")), Ident("s", quatOf[Person]), Ident("s", quatOf[Person])) + qqq.ast mustEqual Map( + Map(Entity("Person", List(), quatOf[Person].probit), IdentP, Property(IdentP, "name")), + 
Ident("s", quatOf[Person]), + Ident("s", quatOf[Person]) + ) } "double unquoted splict with a lift" in { - inline def q = quote { query[Person] } - inline def qq = quote { q.map(p => p.name) } + inline def q = quote(query[Person]) + inline def qq = quote(q.map(p => p.name)) qq.ast mustEqual Map(Entity("Person", List(), quatOf[Person].probit), IdentP, Property(IdentP, "name")) val ctx = new MirrorContext(MirrorSqlDialect, Literal) // We only need a context to do lifts import ctx._ - inline def qqq = quote { qq.map(s => s + lift("hello")) } + inline def qqq = quote(qq.map(s => s + lift("hello"))) } "query with a lazy lift" in { - inline def q = quote { lazyLift("hello") } + inline def q = quote(lazyLift("hello")) q must matchPattern { case Quoted(scalarTag(tagUid), List(LazyPlanter("hello", vaseUid)), List()) if (tagUid == vaseUid) => } } "run lazy lift" in { case class Person(name: String) - inline def q = quote { query[Person].filter(p => p.name == lazyLift("Joe")) } - val ctx = new MirrorContext(MirrorSqlDialect, Literal) + inline def q = quote(query[Person].filter(p => p.name == lazyLift("Joe"))) + val ctx = new MirrorContext(MirrorSqlDialect, Literal) import ctx._ ctx.run(q).triple mustEqual ( "SELECT p.name FROM Person p WHERE p.name = ?", List("Joe"), ExecutionType.Static @@ -102,18 +114,20 @@ class QuotationTest extends Spec with Inside { "two level query with a lift and plus operator" in { case class Address(street: String, zip: Int) extends Embedded case class Person(name: String, age: Int, address: Address) - inline def q = quote { query[Person] } - inline def qq = quote { q.map(p => p.name) } + inline def q = quote(query[Person]) + inline def qq = quote(q.map(p => p.name)) qq.ast mustEqual Map(Entity("Person", List(), PersonQuat), IdentP, Property(IdentP, "name")) // We only need a context to do lifts val ctx = new MirrorContext(PostgresDialect, Literal) import ctx._ - inline def qqq = quote { qq.map(s => s + lift("hello")) } + inline def qqq = quote(qq.map(s => s + lift("hello"))) qqq must matchPattern { case Quoted( Map(Map(Ent("Person"), IdentP, Property(Id("p"), "name")), Id("s"), Id("s") `(+)` scalarTag(tagUid)), - List(EagerPlanter("hello", encoder, planterUid)), // Compare encoders by ref since all mirror encoders are same case class + List( + EagerPlanter("hello", encoder, planterUid) + ), // Compare encoders by ref since all mirror encoders are same case class Nil ) if (tagUid == planterUid && encoder.eq(summon[Encoder[String]])) => } @@ -121,12 +135,12 @@ class QuotationTest extends Spec with Inside { "two level query with a two lifts and plus operator" in { case class Address(street: String, zip: Int) extends Embedded case class Person(name: String, age: Int, address: Address) - inline def q = quote { query[Person] } + inline def q = quote(query[Person]) val ctx = new MirrorContext(PostgresDialect, Literal) import ctx._ - inline def qq = quote { q.map(p => p.name + lift("how")) } + inline def qq = quote(q.map(p => p.name + lift("how"))) qq must matchPattern { case Quoted( Map(Entity("Person", List(), `PersonQuat`), IdentP, Property(IdentP, "name") `(+)` scalarTag(tuid)), @@ -135,10 +149,14 @@ class QuotationTest extends Spec with Inside { ) if (tuid == puid) => } - inline def qqq = quote { qq.map(s => s + lift("are you")) } + inline def qqq = quote(qq.map(s => s + lift("are you"))) qqq must matchPattern { case Quoted( - Map(Map(Ent("Person"), IdentP, Property(IdentP, "name") `(+)` scalarTag(tuid1)), Id("s"), Id("s") `(+)` scalarTag(tuid2)), + Map( + Map(Ent("Person"), 
IdentP, Property(IdentP, "name") `(+)` scalarTag(tuid1)), + Id("s"), + Id("s") `(+)` scalarTag(tuid2) + ), List(EagerPlanter("how", enc1, puid1), EagerPlanter("are you", enc2, puid2)), Nil ) if (tuid1 == puid1 && tuid2 == puid2 && enc1.eq(summon[Encoder[String]])) => @@ -147,11 +165,11 @@ class QuotationTest extends Spec with Inside { "three level query with a lazy-lift/eager-lift/lazy-lift, and plus operator" in { case class Address(street: String, zip: Int) extends Embedded case class Person(name: String, age: Int, address: Address) - inline def q = quote { query[Person] } - val ctx = new MirrorContext(PostgresDialect, Literal) + inline def q = quote(query[Person]) + val ctx = new MirrorContext(PostgresDialect, Literal) import ctx._ - inline def qq = quote { q.map(p => p.name + lazyLift("hello")) } + inline def qq = quote(q.map(p => p.name + lazyLift("hello"))) qq must matchPattern { case Quoted( Map(Entity("Person", List(), `PersonQuat`), IdentP, Property(IdentP, "name") `(+)` scalarTag(uid)), @@ -160,10 +178,14 @@ class QuotationTest extends Spec with Inside { ) if (uid == planterUid) => } - inline def qqq = quote { qq.map(s => s + lift("how") + lazyLift("are you")) } // hellooooooo + inline def qqq = quote(qq.map(s => s + lift("how") + lazyLift("are you"))) // hellooooooo qqq must matchPattern { case Quoted( - Map(Map(Ent("Person"), IdentP, Property(IdentP, "name") `(+)` scalarTag(tuid1)), Id("s"), Id("s") `(+)` scalarTag(tuid2) `(+)` scalarTag(tuid3)), + Map( + Map(Ent("Person"), IdentP, Property(IdentP, "name") `(+)` scalarTag(tuid1)), + Id("s"), + Id("s") `(+)` scalarTag(tuid2) `(+)` scalarTag(tuid3) + ), List(LazyPlanter("hello", puid1), EagerPlanter("how", encoder, puid2), LazyPlanter("are you", puid3)), Nil ) if (tuid1 == puid1 && tuid2 == puid2 && tuid3 == puid3 && encoder.eq(summon[Encoder[String]])) => @@ -173,12 +195,12 @@ class QuotationTest extends Spec with Inside { "runtime quotation has correct ast for" - { "simple one-level query with map" in { - val q = quote { query[Person].map(p => p.name) } + val q = quote(query[Person].map(p => p.name)) q.ast mustEqual Map(Entity("Person", List(), `PersonQuat`), IdentP, Property(IdentP, "name")) } "two-level query with map" in { - val q = quote { query[Person] } - val qq = quote { q.map(p => p.name) } + val q = quote(query[Person]) + val qq = quote(q.map(p => p.name)) qq must matchPattern { case Quoted( Map(QuotationTag(tagId), IdentP, Property(IdentP, "name")), @@ -191,7 +213,7 @@ class QuotationTest extends Spec with Inside { "lift" in { val ctx = new MirrorContext(MirrorSqlDialect, Literal) import ctx._ - val q = quote { lift("hello") } + val q = quote(lift("hello")) q must matchPattern { case Quoted(scalarTag(tagUid), List(EagerPlanter("hello", encoder, vaseUid)), List()) if (tagUid == vaseUid) => } @@ -201,21 +223,27 @@ class QuotationTest extends Spec with Inside { "spliced lift" in { val ctx = new MirrorContext(MirrorSqlDialect, Literal) import ctx._ - val q = quote { lift("hello") } - val qq = quote { q } + val q = quote(lift("hello")) + val qq = quote(q) qq must matchPattern { case Quoted( QuotationTag(quotationTagId), Nil, - List(QuotationVase(Quoted(scalarTag(scalarTagId), List(EagerPlanter("hello", encoder, planterId)), Nil), quotationVaseId)) - ) if (quotationTagId == quotationVaseId && scalarTagId == planterId && encoder.eq(summon[Encoder[String]])) => + List( + QuotationVase( + Quoted(scalarTag(scalarTagId), List(EagerPlanter("hello", encoder, planterId)), Nil), + quotationVaseId + ) + ) + ) + if (quotationTagId == 
quotationVaseId && scalarTagId == planterId && encoder.eq(summon[Encoder[String]])) => } List(Row("hello")) mustEqual q.encodeEagerLifts(Row(), MirrorSession.default) } "query with a lift and plus operator" in { val ctx = new MirrorContext(MirrorSqlDialect, Literal) import ctx._ - inline def q = quote { query[Person].map(p => p.name + lift("hello")) } + inline def q = quote(query[Person].map(p => p.name + lift("hello"))) q must matchPattern { case Quoted( Map(Ent("Person"), IdentP, Property(IdentP, "name") `(+)` scalarTag(tagUid)), @@ -227,8 +255,8 @@ "two-level query with a lift and plus operator" in { val ctx = new MirrorContext(MirrorSqlDialect, Literal) import ctx._ - val q = quote { query[Person] } - val qq = quote { q.map(p => p.name + lift("hello")) } + val q = quote(query[Person]) + val qq = quote(q.map(p => p.name + lift("hello"))) qq must matchPattern { case Quoted( Map(QuotationTag(tid), IdentP, Property(IdentP, "name") `(+)` scalarTag(tagUid)), @@ -241,12 +269,12 @@ "three level val query with two lifts and plus operator" in { case class Address(street: String, zip: Int) extends Embedded case class Person(name: String, age: Int, address: Address) - val q = quote { query[Person] } + val q = quote(query[Person]) val ctx = new MirrorContext(PostgresDialect, Literal) import ctx._ - val qq = quote { q.map(p => p.name + lift("how")) } + val qq = quote(q.map(p => p.name + lift("how"))) qq must matchPattern { case Quoted( Map(QuotationTag(qid), IdentP, Property(IdentP, "name") `(+)` scalarTag(tuid)), @@ -255,20 +283,22 @@ ) if (tuid == puid && qid == vid) => } - val qqq = quote { qq.map(s => s + lift("are you")) } + val qqq = quote(qq.map(s => s + lift("are you"))) val quat = quatOf[Person] qqq must matchPattern { case Quoted( Map(QuotationTag(qid2), Id("s"), Id("s") `(+)` scalarTag(tid2)), List(EagerPlanter("are you", enc2, pid2)), - List(QuotationVase( - Quoted( - Map(QuotationTag(qid), Id("p"), Property(Id("p"), "name") `(+)` scalarTag(tid)), - List(EagerPlanter("how", enc, pid)), - List(QuotationVase(Quoted(Ent("Person"), Nil, Nil), vid)) - ), - vid2 - )) + List( + QuotationVase( + Quoted( + Map(QuotationTag(qid), Id("p"), Property(Id("p"), "name") `(+)` scalarTag(tid)), + List(EagerPlanter("how", enc, pid)), + List(QuotationVase(Quoted(Ent("Person"), Nil, Nil), vid)) + ), + vid2 + ) + ) ) if (tid == pid && qid == vid && tid2 == pid2 && qid2 == vid2) => } // if the test below fails, line error is 259 on scalatest; report as a bug? reproduce? 
@@ -282,8 +312,8 @@ class QuotationTest extends Spec with Inside { "runtime -> compile-time" in { val ctx = new MirrorContext(MirrorSqlDialect, Literal) import ctx._ - val q = quote { query[Person] } - inline def qq = quote { q.map(p => p.name + lift("hello")) } + val q = quote(query[Person]) + inline def qq = quote(q.map(p => p.name + lift("hello"))) qq must matchPattern { case Quoted( Map(QuotationTag(tid), `IdentP`, Property(`IdentP`, "name") `(+)` scalarTag(tagUid)), @@ -298,8 +328,8 @@ class QuotationTest extends Spec with Inside { "compile-time -> runtime" in { val ctx = new MirrorContext(MirrorSqlDialect, Literal) import ctx._ - inline def q = quote { query[Person] } - val qq = quote { q.map(p => p.name + lift("hello")) } + inline def q = quote(query[Person]) + val qq = quote(q.map(p => p.name + lift("hello"))) qq must matchPattern { case Quoted( Map(Ent("Person"), IdentP, Property(IdentP, "name") `(+)` scalarTag(tagUid)), @@ -315,12 +345,12 @@ class QuotationTest extends Spec with Inside { "compile-time -> runtime -> compile-time" in { case class Address(street: String, zip: Int) extends Embedded case class Person(name: String, age: Int, address: Address) - inline def q = quote { query[Person] } + inline def q = quote(query[Person]) val ctx = new MirrorContext(PostgresDialect, Literal) import ctx._ - val qq = quote { q.map(p => p.name + lift("how")) } + val qq = quote(q.map(p => p.name + lift("how"))) qq must matchPattern { case Quoted( Map(Ent("Person"), IdentP, Property(IdentP, "name") `(+)` scalarTag(tuid)), @@ -329,20 +359,22 @@ class QuotationTest extends Spec with Inside { ) if (tuid == puid) => } - inline def qqq = quote { qq.map(s => s + lift("are you")) } + inline def qqq = quote(qq.map(s => s + lift("are you"))) // Should not match this pattern, should be spliced directly from the inline def qqq must matchPattern { case Quoted( Map(QuotationTag(qid2), Id("s"), Id("s") `(+)` scalarTag(tid2)), List(EagerPlanter("are you", enc2, pid2)), - List(QuotationVase( - Quoted( - Map(Ent("Person"), IdentP, Property(IdentP, "name") `(+)` scalarTag(tid)), - List(EagerPlanter("how", enc1, pid)), - Nil - ), - vid2 - )) + List( + QuotationVase( + Quoted( + Map(Ent("Person"), IdentP, Property(IdentP, "name") `(+)` scalarTag(tid)), + List(EagerPlanter("how", enc1, pid)), + Nil + ), + vid2 + ) + ) ) if (pid == tid && tid2 == pid2 && qid2 == vid2) => } ctx.pull(qqq) mustEqual (List("how", "are you"), ExecutionType.Dynamic) @@ -351,12 +383,12 @@ class QuotationTest extends Spec with Inside { "runtime -> compile-time -> runtime" in { case class Address(street: String, zip: Int) extends Embedded case class Person(name: String, age: Int, address: Address) - val q = quote { query[Person] } + val q = quote(query[Person]) val ctx = new MirrorContext(PostgresDialect, Literal) import ctx._ - inline def qq = quote { q.map(p => p.name + lift("how")) } + inline def qq = quote(q.map(p => p.name + lift("how"))) qq must matchPattern { case Quoted( Map(QuotationTag(qid), IdentP, Property(IdentP, "name") `(+)` scalarTag(tuid)), @@ -365,20 +397,22 @@ class QuotationTest extends Spec with Inside { ) if (tuid == puid && qid == vid) => } - val qqq = quote { qq.map(s => s + lift("are you")) } + val qqq = quote(qq.map(s => s + lift("are you"))) // Should not match this pattern, should be spliced directly from the inline def qqq must matchPattern { case Quoted( Map(QuotationTag(qid2), Id("s"), Id("s") `(+)` scalarTag(tid2)), List(EagerPlanter("how", enc, pid), EagerPlanter("are you", enc2, pid2)), - 
List(QuotationVase( - Quoted( - Map(QuotationTag(qid), IdentP, Property(IdentP, "name") `(+)` scalarTag(tid)), - List(EagerPlanter("how", enc1, pid1)), - List(QuotationVase(Quoted(Ent("Person"), Nil, Nil), vid)) - ), - vid2 - )) + List( + QuotationVase( + Quoted( + Map(QuotationTag(qid), IdentP, Property(IdentP, "name") `(+)` scalarTag(tid)), + List(EagerPlanter("how", enc1, pid1)), + List(QuotationVase(Quoted(Ent("Person"), Nil, Nil), vid)) + ), + vid2 + ) + ) ) if (tid == pid && qid == vid && pid1 == pid && tid2 == pid2 && qid2 == vid2) => } ctx.pull(qqq) mustEqual (List("how", "are you"), ExecutionType.Dynamic) @@ -388,12 +422,12 @@ class QuotationTest extends Spec with Inside { "special cases" - { "lazy lift should crash dynamic query" in { case class Person(name: String, age: Int) - val q = quote { query[Person].map(p => p.name + lazyLift("hello")) } + val q = quote(query[Person].map(p => p.name + lazyLift("hello"))) val ctx = new MirrorContext(PostgresDialect, Literal) import ctx._ - assertThrows[IllegalArgumentException] { ctx.run(q) } + assertThrows[IllegalArgumentException](ctx.run(q)) } "pull quote from unavailable context - only inlines - with map" in { @@ -409,7 +443,7 @@ } } } - inline def q = quote { new Outer().qqq } + inline def q = quote(new Outer().qqq) ctx.run(q).triple mustEqual ( "SELECT (p.name || ?) || ? FROM Person p", List("how", "are you"), ExecutionType.Static ) @@ -428,7 +462,7 @@ } } } - inline def q = quote { new Outer().qqq } + inline def q = quote(new Outer().qqq) ctx.run(q).triple mustEqual ( "SELECT (p.name || ?) || ? FROM Person p", List("how", "are you"), ExecutionType.Static ) @@ -439,15 +473,15 @@ import ctx._ class Outer { - inline def qqq = quote { new Inner().qq } + inline def qqq = quote(new Inner().qq) class Inner { - inline def qq = quote { new Core().q } + inline def qq = quote(new Core().q) class Core { - inline def q = quote { query[Person] } + inline def q = quote(query[Person]) } } } - inline def qry = quote { new Outer().qqq } + inline def qry = quote(new Outer().qqq) ctx.run(qry).triple mustEqual ( "SELECT x.name, x.age, x.street, x.zip FROM Person x", List(), ExecutionType.Static ) diff --git a/quill-sql/src/test/scala/io/getquill/Spec.scala b/quill-sql/src/test/scala/io/getquill/Spec.scala index c12a00cbd..c51b2f65a 100644 --- a/quill-sql/src/test/scala/io/getquill/Spec.scala +++ b/quill-sql/src/test/scala/io/getquill/Spec.scala @@ -16,13 +16,14 @@ import io.getquill.Query import io.getquill.context.mirror.Row abstract class Spec extends AnyFreeSpec with Matchers with BeforeAndAfterAll { - val QV = Quat.Value - def QEP(name: String) = Quat.Product.empty(name) + val QV = Quat.Value + def QEP(name: String) = Quat.Product.empty(name) def QP(name: String, fields: String*) = Quat.LeafProduct(name, fields: _*) extension (m: MirrorContextBase[_, _]#BatchActionReturningMirror[_]) { def triple = { - if (m.groups.length != 1) fail(s"Expected all batch groups per design to only have one root element but has multiple ${m.groups}") + if (m.groups.length != 1) + fail(s"Expected all batch groups per design to only have one root element but has multiple ${m.groups}") val (queryString, returnAction, prepares) = m.groups(0) ( queryString, @@ -65,7 +66,8 @@ abstract class Spec extends AnyFreeSpec with Matchers with BeforeAndAfterAll { extension (m: MirrorContextBase[_, _]#BatchActionMirror) { def triple = { - if 
(m.groups.length != 1) fail(s"Expected all batch groups per design to only have one root element but has multiple ${m.groups}") + if (m.groups.length != 1) + fail(s"Expected all batch groups per design to only have one root element but has multiple ${m.groups}") val (queryString, prepares) = m.groups(0) ( queryString, @@ -149,8 +151,8 @@ abstract class Spec extends AnyFreeSpec with Matchers with BeforeAndAfterAll { extension [T, PrepareRow, Session](q: Quoted[T]) { def encodeEagerLifts(row: PrepareRow, session: Session) = - q.lifts.zipWithIndex.collect { - case (ep: EagerPlanter[String, PrepareRow, Session], idx) => ep.encoder(idx, ep.value, row, session) + q.lifts.zipWithIndex.collect { case (ep: EagerPlanter[String, PrepareRow, Session], idx) => + ep.encoder(idx, ep.value, row, session) } } @@ -161,11 +163,11 @@ abstract class Spec extends AnyFreeSpec with Matchers with BeforeAndAfterAll { object ShortAst { object Id { def apply(str: String, quat: Quat) = Ident(str, quat) - def unapply(id: Ident) = Some(id.name) + def unapply(id: Ident) = Some(id.name) } object Ent { def apply(name: String, quat: Quat.Product) = Entity(name, Nil, quat) - def unapply(entity: Entity) = Some(entity.name) + def unapply(entity: Entity) = Some(entity.name) } object `(+)` { def apply(a: Ast, b: Ast) = BinaryOperation(a, StringOperator.+, b) diff --git a/quill-sql/src/test/scala/io/getquill/StructuralTypeSpec.scala b/quill-sql/src/test/scala/io/getquill/StructuralTypeSpec.scala index 94738c38f..885b64a8b 100644 --- a/quill-sql/src/test/scala/io/getquill/StructuralTypeSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/StructuralTypeSpec.scala @@ -11,8 +11,8 @@ class StructuralTypeSpec extends Spec { import ctx._ "quoted function" in { - inline def filterByName[T <: { def name: String }] = quote { - (q: Query[T]) => q.filter(p => p.name == "Joe") + inline def filterByName[T <: { def name: String }] = quote { (q: Query[T]) => + q.filter(p => p.name == "Joe") } val mirror = ctx.run(filterByName(query[Person])) @@ -40,8 +40,8 @@ class StructuralTypeSpec extends Spec { } "quoted function dynamic" in { - def filterByName[T <: { def name: String }] = quote { - (q: Query[T]) => q.filter(p => p.name == "Joe") + def filterByName[T <: { def name: String }] = quote { (q: Query[T]) => + q.filter(p => p.name == "Joe") } val mirror = ctx.run(filterByName(query[Person])) diff --git a/quill-sql/src/test/scala/io/getquill/TestEntities.scala b/quill-sql/src/test/scala/io/getquill/TestEntities.scala index 1f2145ece..865973f4d 100644 --- a/quill-sql/src/test/scala/io/getquill/TestEntities.scala +++ b/quill-sql/src/test/scala/io/getquill/TestEntities.scala @@ -18,15 +18,16 @@ trait TestEntities { case class TestEntity4Emb(emb: EmbSingle) case class TestEntityRegular(s: String, i: Long) - private val QV = Quat.Value + private val QV = Quat.Value private val QBV = Quat.BooleanValue val TestEntityQuat = Quat.Product("TestEntity", "s" -> QV, "i" -> QV, "l" -> QV, "o" -> QV, "b" -> QBV) - val TestEntityEmbQuat = Quat.Product("TestEntityEmb", "emb" -> Quat.Product("Emb", "s" -> QV, "i" -> QV), "l" -> QV, "o" -> QV) - val TestEntity2Quat = Quat.Product("TestEntity2", "s" -> QV, "i" -> QV, "l" -> QV, "o" -> QV) - val TestEntity3Quat = Quat.Product("TestEntity3", "s" -> QV, "i" -> QV, "l" -> QV, "o" -> QV) - val TestEntity4Quat = Quat.Product("TestEntity4", "i" -> QV) - val TestEntity5Quat = Quat.Product("TestEntity5", "i" -> QV, "s" -> QV) + val TestEntityEmbQuat = + Quat.Product("TestEntityEmb", "emb" -> Quat.Product("Emb", "s" -> QV, "i" 
-> QV), "l" -> QV, "o" -> QV) + val TestEntity2Quat = Quat.Product("TestEntity2", "s" -> QV, "i" -> QV, "l" -> QV, "o" -> QV) + val TestEntity3Quat = Quat.Product("TestEntity3", "s" -> QV, "i" -> QV, "l" -> QV, "o" -> QV) + val TestEntity4Quat = Quat.Product("TestEntity4", "i" -> QV) + val TestEntity5Quat = Quat.Product("TestEntity5", "i" -> QV, "s" -> QV) val TestEntity4EmbQuat = Quat.Product("TestEntity4Emb", "emb" -> Quat.Product("EmbSingle", "i" -> QV)) inline def qr1 = quote { diff --git a/quill-sql/src/test/scala/io/getquill/anyval/AnyValEncodingSpec.scala b/quill-sql/src/test/scala/io/getquill/anyval/AnyValEncodingSpec.scala index 318527562..95e9b2ad2 100644 --- a/quill-sql/src/test/scala/io/getquill/anyval/AnyValEncodingSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/anyval/AnyValEncodingSpec.scala @@ -14,7 +14,7 @@ class AnyValEncodingSpec extends Spec { "simple anyval should encode and decode" in { // val id = Rec(Blah("Joe", 123), "Bloggs") - val name = Name("Joe") + val name = Name("Joe") val mirror = ctx.run(query[Person].filter(p => p.name == lift(name))) println(mirror) } diff --git a/quill-sql/src/test/scala/io/getquill/ast/ActionAstSpec.scala b/quill-sql/src/test/scala/io/getquill/ast/ActionAstSpec.scala index c9381ff32..916407a0a 100644 --- a/quill-sql/src/test/scala/io/getquill/ast/ActionAstSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/ast/ActionAstSpec.scala @@ -31,12 +31,12 @@ class ActionAstSpec extends Spec with Inside { "simple (assignment based apis)" - { "simple insert" in {} "simple update Step 1" in { - inline def q = quote { query[Person].update(_.name -> lift("Joe"), _.age -> 123) } + inline def q = quote(query[Person].update(_.name -> lift("Joe"), _.age -> 123)) // println( io.getquill.util.Messages.qprint(q) ) // println( ctx.run(q) ) } "simple update Step 2" in { - inline def q = quote { query[Person].update(_.name -> lift("Joe"), _.age -> 123) } + inline def q = quote(query[Person].update(_.name -> lift("Joe"), _.age -> 123)) // println( io.getquill.util.Messages.qprint(q) ) q must matchPattern { case Quoted( diff --git a/quill-sql/src/test/scala/io/getquill/context/encoding/OptionalNestedSpec.scala b/quill-sql/src/test/scala/io/getquill/context/encoding/OptionalNestedSpec.scala index 84284cd4a..660d104b4 100644 --- a/quill-sql/src/test/scala/io/getquill/context/encoding/OptionalNestedSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/context/encoding/OptionalNestedSpec.scala @@ -17,16 +17,18 @@ trait OptionalNestedSpec extends Spec with BeforeAndAfterEach { case class LastNameAge(lastName: String, age: Int) case class Contact(firstName: String, opt: Option[LastNameAge], addressFk: Int) - inline def data = quote { query[Contact] } + inline def data = quote(query[Contact]) inline def `1.Ex1 - Not null inner product insert` = quote { - sql"insert into Contact (firstName, lastName, age, addressFk) values ('Joe', 'Bloggs', 123, 444)".as[Insert[Contact]] + sql"insert into Contact (firstName, lastName, age, addressFk) values ('Joe', 'Bloggs', 123, 444)" + .as[Insert[Contact]] } inline def `1.Ex1 - Not null inner product result` = Contact("Joe", Some(LastNameAge("Bloggs", 123)), 444) inline def `1.Ex2 - null inner product insert` = quote { - sql"insert into Contact (firstName, lastName, age, addressFk) values ('Joe', null, null, null)".as[Insert[Contact]] + sql"insert into Contact (firstName, lastName, age, addressFk) values ('Joe', null, null, null)" + .as[Insert[Contact]] } inline def `1.Ex2 - null inner product result` = Contact("Joe", None, 
0) @@ -37,10 +39,11 @@ trait OptionalNestedSpec extends Spec with BeforeAndAfterEach { case class LastNameAge(lastName: String, age: Age) case class Contact(firstName: String, opt: Option[LastNameAge], addressFk: Int) - inline def data = quote { query[Contact] } + inline def data = quote(query[Contact]) inline def `2.Ex1 - not-null insert` = quote { - sql"insert into Contact (firstName, lastName, age, addressFk) values ('Joe', 'Bloggs', 123, 444)".as[Insert[Contact]] + sql"insert into Contact (firstName, lastName, age, addressFk) values ('Joe', 'Bloggs', 123, 444)" + .as[Insert[Contact]] } inline def `2.Ex1 - not-null result` = Contact("Joe", Some(LastNameAge("Bloggs", Age(Some(123)))), 444) @@ -52,7 +55,8 @@ trait OptionalNestedSpec extends Spec with BeforeAndAfterEach { Contact("Joe", None, 444) inline def `2.Ex3 - Null inner leaf insert` = quote { - sql"insert into Contact (firstName, lastName, age, addressFk) values ('Joe', 'Bloggs', null, 444)".as[Insert[Contact]] + sql"insert into Contact (firstName, lastName, age, addressFk) values ('Joe', 'Bloggs', null, 444)" + .as[Insert[Contact]] } inline def `2.Ex3 - Null inner leaf result` = Contact("Joe", Some(LastNameAge("Bloggs", Age(None))), 444) @@ -63,7 +67,7 @@ trait OptionalNestedSpec extends Spec with BeforeAndAfterEach { case class LastNameAge(lastName: String, age: Option[Age]) case class Contact(firstName: String, opt: Option[LastNameAge], addressFk: Int) - inline def data = quote { query[Contact] } + inline def data = quote(query[Contact]) inline def `3.Ex1 - Null inner product insert` = quote { sql"insert into Contact (firstName, lastName, age, addressFk) values ('Joe', null, null, 444)".as[Insert[Contact]] @@ -72,7 +76,8 @@ trait OptionalNestedSpec extends Spec with BeforeAndAfterEach { Contact("Joe", None, 444) inline def `3.Ex2 - Null inner leaf insert` = quote { - sql"insert into Contact (firstName, lastName, age, addressFk) values ('Joe', 'Bloggs', null, 444)".as[Insert[Contact]] + sql"insert into Contact (firstName, lastName, age, addressFk) values ('Joe', 'Bloggs', null, 444)" + .as[Insert[Contact]] } inline def `3.Ex2 - Null inner leaf result` = Contact("Joe", Some(LastNameAge("Bloggs", None)), 444) diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/ArrayOpsSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/ArrayOpsSpec.scala index ba6fb89f6..ca6db6981 100644 --- a/quill-sql/src/test/scala/io/getquill/context/sql/ArrayOpsSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/context/sql/ArrayOpsSpec.scala @@ -28,15 +28,15 @@ trait ArrayOpsSpec extends Spec { inline def idByContains(x: Int) = quote(entity.filter(_.numbers.contains(lift(x))).map(_.id)) inline def `Ex 1 return all` = quote(idByContains(1)) - val `Ex 1 expected` = List(1, 2, 3) + val `Ex 1 expected` = List(1, 2, 3) inline def `Ex 2 return 1` = quote(idByContains(3)) - val `Ex 2 expected` = List(1) + val `Ex 2 expected` = List(1) inline def `Ex 3 return 2,3` = quote(idByContains(4)) - val `Ex 3 expected` = List(2, 3) + val `Ex 3 expected` = List(2, 3) inline def `Ex 4 return empty` = quote(idByContains(10)) - val `Ex 4 expected` = Nil + val `Ex 4 expected` = Nil } } diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/BatchUpdateValuesMirrorSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/BatchUpdateValuesMirrorSpec.scala index b40173af4..fa3879cbe 100644 --- a/quill-sql/src/test/scala/io/getquill/context/sql/BatchUpdateValuesMirrorSpec.scala +++ 
b/quill-sql/src/test/scala/io/getquill/context/sql/BatchUpdateValuesMirrorSpec.scala @@ -9,13 +9,15 @@ import io.getquill.norm.EnableTrace import io.getquill.context.ExecutionType.Static /** - * Note that queries are a little different from Scala2-Quill BatchUpdateValuesMirrorSpec because - * ProtoQuill is a bit smarter in recornizing which ScalarLift tags are the the same so it doesn't - * need to copy as many `?` placeholders. + * Note that queries are a little different from Scala2-Quill + * BatchUpdateValuesMirrorSpec because ProtoQuill is a bit smarter in + * recognizing which ScalarLift tags are the same so it doesn't need to copy + * as many `?` placeholders. */ class BatchUpdateValuesMirrorSpec extends BatchUpdateValuesSpec { - val context: SqlMirrorContext[PostgresDialect, Literal] = new SqlMirrorContext[PostgresDialect, Literal](PostgresDialect, Literal) + val context: SqlMirrorContext[PostgresDialect, Literal] = + new SqlMirrorContext[PostgresDialect, Literal](PostgresDialect, Literal) import context._ "Ex 1 - Simple Contact" in { @@ -123,7 +125,10 @@ class BatchUpdateValuesMirrorSpec extends BatchUpdateValuesSpec { context.run(update, 2).tripleBatchMulti mustEqual List( ( "UPDATE Contact AS p SET lastName = ps.lastName || ? FROM (VALUES (?, ?), (?, ?)) AS ps(firstName, lastName) WHERE p.firstName = ps.firstName AND (p.firstName = ? OR p.firstName = ?)", - List(List(" Jr.", "Joe", "BloggsU", "Jan", "RoggsU", "Joe", "Jan"), List(" Jr.", "James", "JonesU", "Dale", "DomesU", "Joe", "Jan")), + List( + List(" Jr.", "Joe", "BloggsU", "Jan", "RoggsU", "Joe", "Jan"), + List(" Jr.", "James", "JonesU", "Dale", "DomesU", "Joe", "Jan") + ), Static ), ( diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/BatchValuesSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/BatchValuesSpec.scala index c7c25c0b9..a12e7b023 100644 --- a/quill-sql/src/test/scala/io/getquill/context/sql/BatchValuesSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/context/sql/BatchValuesSpec.scala @@ -18,43 +18,45 @@ trait BatchValuesSpec extends Spec with BeforeAndAfterEach { object `Ex 1 - Batch Insert Normal` { inline given InsertMeta[Product] = insertMeta(_.id) - val products = makeProducts(22) - val batchSize = 5 - inline def opExt = quote { - (transform: Insert[Product] => Insert[Product]) => - liftQuery(products).foreach(p => transform(query[Product].insertValue(p))) + val products = makeProducts(22) + val batchSize = 5 + inline def opExt = quote { (transform: Insert[Product] => Insert[Product]) => + liftQuery(products).foreach(p => transform(query[Product].insertValue(p))) } inline def op = quote { liftQuery(products).foreach(p => query[Product].insertValue(p)) } - inline def get = quote { query[Product] } - def result = products + inline def get = quote(query[Product]) + def result = products } object `Ex 2 - Batch Insert Returning` { val productsOriginal = makeProducts(20) // want to populate them from DB - val products = productsOriginal.map(p => p.copy(id = 0)) + val products = productsOriginal.map(p => p.copy(id = 0)) val expectedIds = productsOriginal.map(_.id) - val batchSize = 10 + val batchSize = 10 inline def op = quote { liftQuery(products).foreach(p => query[Product].insertValue(p).returningGenerated(p => p.id)) } - inline def get = quote { query[Product] } - def result = productsOriginal + inline def get = quote(query[Product]) + def result = productsOriginal } object `Ex 3 - Batch Insert Mixed` { - val products = makeProducts(20) + val products = makeProducts(20) val 
batchSize = 40 inline def op = quote { - liftQuery(products).foreach(p => query[Product].insert(_.id -> p.id, _.description -> lift("BlahBlah"), _.sku -> p.sku)) + liftQuery(products).foreach(p => + query[Product].insert(_.id -> p.id, _.description -> lift("BlahBlah"), _.sku -> p.sku) + ) } - inline def opExt = quote { - (transform: Insert[Product] => Insert[Product]) => - liftQuery(products).foreach(p => transform(query[Product].insert(_.id -> p.id, _.description -> lift("BlahBlah"), _.sku -> p.sku))) + inline def opExt = quote { (transform: Insert[Product] => Insert[Product]) => + liftQuery(products).foreach(p => + transform(query[Product].insert(_.id -> p.id, _.description -> lift("BlahBlah"), _.sku -> p.sku)) + ) } - inline def get = quote { query[Product] } - def result = products.map(_.copy(description = "BlahBlah")) + inline def get = quote(query[Product]) + def result = products.map(_.copy(description = "BlahBlah")) } } diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/DepartmentsSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/DepartmentsSpec.scala index 4e7d5070c..8f9fb93dd 100644 --- a/quill-sql/src/test/scala/io/getquill/context/sql/DepartmentsSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/context/sql/DepartmentsSpec.scala @@ -16,8 +16,8 @@ trait DepartmentsSpec extends Spec { case class Task(emp: String, tsk: String) inline def departmentInsert = - quote { - (dpt: Department) => query[Department].insertValue(dpt) + quote { (dpt: Department) => + query[Department].insertValue(dpt) } val departmentEntries = @@ -29,8 +29,8 @@ trait DepartmentsSpec extends Spec { ) inline def employeeInsert = - quote { - (emp: Employee) => query[Employee].insertValue(emp) + quote { (emp: Employee) => + query[Employee].insertValue(emp) } val employeeEntries = @@ -44,8 +44,8 @@ trait DepartmentsSpec extends Spec { ) inline def taskInsert = - quote { - (tsk: Task) => query[Task].insertValue(tsk) + quote { (tsk: Task) => + query[Task].insertValue(tsk) } val taskEntries = @@ -64,21 +64,20 @@ trait DepartmentsSpec extends Spec { ) inline def `Example 8 expertise naive` = - quote { - (u: String) => - for { - d <- query[Department] if ( - (for { - e <- query[Employee] if ( - e.dpt == d.dpt && ( - for { - t <- query[Task] if (e.emp == t.emp && t.tsk == u) - } yield {} - ).isEmpty - ) - } yield {}).isEmpty - ) - } yield d.dpt + quote { (u: String) => + for { + d <- query[Department] if ( + (for { + e <- query[Employee] if ( + e.dpt == d.dpt && ( + for { + t <- query[Task] if (e.emp == t.emp && t.tsk == u) + } yield {} + ).isEmpty + ) + } yield {}).isEmpty + ) + } yield d.dpt } val `Example 8 param` = "abstract" @@ -117,25 +116,21 @@ trait DepartmentsSpec extends Spec { // TODO Typing error if add `quote` around this. 
Examine that more closely inline def all[T] = - (xs: Query[T]) => - (p: T => Boolean) => - !any(xs)(x => !p(x)) + (xs: Query[T]) => (p: T => Boolean) => !any(xs)(x => !p(x)) inline def contains[T] = quote { (xs: Query[T]) => (u: T) => any(xs)(x => x == u) } - inline def `Example 9 expertise` = { - quote { - (u: String) => - for { - (dpt, employees) <- nestedOrg if (all(employees) { case (emp, tasks) => contains(tasks)(u) }) - } yield { - dpt - } + inline def `Example 9 expertise` = + quote { (u: String) => + for { + (dpt, employees) <- nestedOrg if (all(employees) { case (emp, tasks) => contains(tasks)(u) }) + } yield { + dpt + } } - } val `Example 9 param` = "abstract" diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/DistinctSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/DistinctSpec.scala index 7e66153f6..91af6860e 100644 --- a/quill-sql/src/test/scala/io/getquill/context/sql/DistinctSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/context/sql/DistinctSpec.scala @@ -102,7 +102,8 @@ trait DistinctSpec extends Spec { query[Person] .join( query[Couple].map(_.him).distinct - ).on((p, cm) => p.name == cm) + ) + .on((p, cm) => p.name == cm) } val `Ex 5 Distinct Subquery with Map Single Field Result` = List( @@ -114,7 +115,8 @@ trait DistinctSpec extends Spec { query[Person] .join( query[Couple].map(c => (c.him, c.her)).distinct - ).on(_.name == _._1) + ) + .on(_.name == _._1) } val `Ex 6 Distinct Subquery with Map Multi Field Result` = List( @@ -130,7 +132,8 @@ trait DistinctSpec extends Spec { query[Person] .join( query[Couple].map(c => TwoField(c.him, c.her)).distinct - ).on(_.name == _.one) + ) + .on(_.name == _.one) } val `Ex 7 Distinct Subquery with Map Multi Field Tuple Result` = List( @@ -143,7 +146,8 @@ trait DistinctSpec extends Spec { inline def `Ex 8 Distinct With Sort` = quote { query[Person] - .join(query[Couple]).on(_.name == _.him) + .join(query[Couple]) + .on(_.name == _.him) .distinct .sortBy(_._1.name)(Ord.asc) } @@ -164,7 +168,8 @@ trait DistinctSpec extends Spec { inline def `Ex 10 DistinctOn With Applicative Join` = quote { query[Person] - .join(query[Couple]).on(_.name == _.him) + .join(query[Couple]) + .on(_.name == _.him) .distinctOn(_._1.name) .sortBy(_._1.name)(Ord.asc) .map(t => (t._1, t._2.him)) diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/EncodingSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/EncodingSpec.scala index 93d88568b..3afb4db30 100644 --- a/quill-sql/src/test/scala/io/getquill/context/sql/EncodingSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/context/sql/EncodingSpec.scala @@ -18,30 +18,30 @@ trait EncodingSpec extends Spec { import context._ case class TimeEntity( - sqlDate: java.sql.Date, // DATE - sqlTime: java.sql.Time, // TIME - sqlTimestamp: java.sql.Timestamp, // DATETIME - timeLocalDate: java.time.LocalDate, // DATE - timeLocalTime: java.time.LocalTime, // TIME - timeLocalDateTime: java.time.LocalDateTime, // DATETIME - timeZonedDateTime: java.time.ZonedDateTime, // DATETIMEOFFSET - timeInstant: java.time.Instant, // DATETIMEOFFSET - timeOffsetTime: java.time.OffsetTime, // TIME - timeOffsetDateTime: java.time.OffsetDateTime // DATETIMEOFFSET + sqlDate: java.sql.Date, // DATE + sqlTime: java.sql.Time, // TIME + sqlTimestamp: java.sql.Timestamp, // DATETIME + timeLocalDate: java.time.LocalDate, // DATE + timeLocalTime: java.time.LocalTime, // TIME + timeLocalDateTime: java.time.LocalDateTime, // DATETIME + timeZonedDateTime: java.time.ZonedDateTime, // DATETIMEOFFSET + 
timeInstant: java.time.Instant, // DATETIMEOFFSET + timeOffsetTime: java.time.OffsetTime, // TIME + timeOffsetDateTime: java.time.OffsetDateTime // DATETIMEOFFSET ) { override def equals(other: Any): Boolean = other match { case t: TimeEntity => this.sqlDate == t.sqlDate && - this.sqlTime == t.sqlTime && - this.sqlTimestamp == t.sqlTimestamp && - this.timeLocalDate == t.timeLocalDate && - this.timeLocalTime == t.timeLocalTime && - this.timeLocalDateTime == t.timeLocalDateTime && - this.timeZonedDateTime.isEqual(t.timeZonedDateTime) && - this.timeInstant == t.timeInstant && - this.timeOffsetTime.isEqual(t.timeOffsetTime) && - this.timeOffsetDateTime.isEqual(t.timeOffsetDateTime) + this.sqlTime == t.sqlTime && + this.sqlTimestamp == t.sqlTimestamp && + this.timeLocalDate == t.timeLocalDate && + this.timeLocalTime == t.timeLocalTime && + this.timeLocalDateTime == t.timeLocalDateTime && + this.timeZonedDateTime.isEqual(t.timeZonedDateTime) && + this.timeInstant == t.timeInstant && + this.timeOffsetTime.isEqual(t.timeOffsetTime) && + this.timeOffsetDateTime.isEqual(t.timeOffsetDateTime) case _ => false } } @@ -50,11 +50,11 @@ trait EncodingSpec extends Spec { def make(zoneIdRaw: ZoneId) = { val zoneId = zoneIdRaw.normalized() // Millisecond precisions in SQL Server and many contexts are wrong so not using them - val nowInstant = LocalDateTime.of(2022, 1, 2, 3, 4, 6, 0).atZone(zoneId).toInstant + val nowInstant = LocalDateTime.of(2022, 1, 2, 3, 4, 6, 0).atZone(zoneId).toInstant val nowDateTime = LocalDateTime.ofInstant(nowInstant, zoneId) - val nowDate = nowDateTime.toLocalDate - val nowTime = nowDateTime.toLocalTime - val nowZoned = ZonedDateTime.of(nowDateTime, zoneId) + val nowDate = nowDateTime.toLocalDate + val nowTime = nowDateTime.toLocalTime + val nowZoned = ZonedDateTime.of(nowDateTime, zoneId) TimeEntity( java.sql.Date.valueOf(nowDate), java.sql.Time.valueOf(nowTime), @@ -71,43 +71,43 @@ trait EncodingSpec extends Spec { } case class EncodingTestEntity( - v1: String, - v2: BigDecimal, - v3: Boolean, - v4: Byte, - v5: Short, - v6: Int, - v7: Long, - v8: Float, - v9: Double, - v10: Array[Byte], - v11: Date, - v12: EncodingTestType, - v13: LocalDate, - v14: UUID, - o1: Option[String], - o2: Option[BigDecimal], - o3: Option[Boolean], - o4: Option[Byte], - o5: Option[Short], - o6: Option[Int], - o7: Option[Long], - o8: Option[Float], - o9: Option[Double], - o10: Option[Array[Byte]], - o11: Option[Date], - o12: Option[EncodingTestType], - o13: Option[LocalDate], - o14: Option[UUID], - o15: Option[Number] + v1: String, + v2: BigDecimal, + v3: Boolean, + v4: Byte, + v5: Short, + v6: Int, + v7: Long, + v8: Float, + v9: Double, + v10: Array[Byte], + v11: Date, + v12: EncodingTestType, + v13: LocalDate, + v14: UUID, + o1: Option[String], + o2: Option[BigDecimal], + o3: Option[Boolean], + o4: Option[Byte], + o5: Option[Short], + o6: Option[Int], + o7: Option[Long], + o8: Option[Float], + o9: Option[Double], + o10: Option[Array[Byte]], + o11: Option[Date], + o12: Option[EncodingTestType], + o13: Option[LocalDate], + o14: Option[UUID], + o15: Option[Number] ) inline def delete = quote { query[EncodingTestEntity].delete } - inline def insert = quote { - (e: EncodingTestEntity) => query[EncodingTestEntity].insertValue(e) + inline def insert = quote { (e: EncodingTestEntity) => + query[EncodingTestEntity].insertValue(e) } val insertValues = @@ -178,47 +178,46 @@ trait EncodingSpec extends Spec { def verify(result: List[EncodingTestEntity]) = { result.size mustEqual insertValues.size - 
result.zip(insertValues).foreach { - case (e1, e2) => - e1.v1 mustEqual e2.v1 - e1.v2 mustEqual e2.v2 - e1.v3 mustEqual e2.v3 - e1.v4 mustEqual e2.v4 - e1.v5 mustEqual e2.v5 - e1.v6 mustEqual e2.v6 - e1.v7 mustEqual e2.v7 - e1.v8 mustEqual e2.v8 - e1.v9 mustEqual e2.v9 - e1.v10 mustEqual e2.v10 - e1.v11 mustEqual e2.v11 - e1.v12 mustEqual e2.v12 - e1.v13 mustEqual e2.v13 - e1.v14 mustEqual e2.v14 - - e1.o1 mustEqual e2.o1 - e1.o2 mustEqual e2.o2 - e1.o3 mustEqual e2.o3 - e1.o4 mustEqual e2.o4 - e1.o5 mustEqual e2.o5 - e1.o6 mustEqual e2.o6 - e1.o7 mustEqual e2.o7 - e1.o8 mustEqual e2.o8 - e1.o9 mustEqual e2.o9 - // For Protoquill, array needs to be explicitly typed as [Byte] or "No ClassTag available for Nothing" error occurs - // TODO this should probably be mentioned in the Docs somewhere? Might have some user impact. - e1.o10.getOrElse(Array[Byte]()) mustEqual e2.o10.getOrElse(Array[Byte]()) - e1.o11 mustEqual e2.o11 - e1.o12 mustEqual e2.o12 - e1.o13 mustEqual e2.o13 - e1.o14 mustEqual e2.o14 - e1.o15 mustEqual e2.o15 + result.zip(insertValues).foreach { case (e1, e2) => + e1.v1 mustEqual e2.v1 + e1.v2 mustEqual e2.v2 + e1.v3 mustEqual e2.v3 + e1.v4 mustEqual e2.v4 + e1.v5 mustEqual e2.v5 + e1.v6 mustEqual e2.v6 + e1.v7 mustEqual e2.v7 + e1.v8 mustEqual e2.v8 + e1.v9 mustEqual e2.v9 + e1.v10 mustEqual e2.v10 + e1.v11 mustEqual e2.v11 + e1.v12 mustEqual e2.v12 + e1.v13 mustEqual e2.v13 + e1.v14 mustEqual e2.v14 + + e1.o1 mustEqual e2.o1 + e1.o2 mustEqual e2.o2 + e1.o3 mustEqual e2.o3 + e1.o4 mustEqual e2.o4 + e1.o5 mustEqual e2.o5 + e1.o6 mustEqual e2.o6 + e1.o7 mustEqual e2.o7 + e1.o8 mustEqual e2.o8 + e1.o9 mustEqual e2.o9 + // For Protoquill, array needs to be explicitly typed as [Byte] or "No ClassTag available for Nothing" error occurs + // TODO this should probably be mentioned in the Docs somewhere? Might have some user impact. 
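+ // An illustrative aside (hypothetical code, not in the source): left untyped, + // e1.o10.getOrElse(Array()) is pinned to Array[Nothing] and fails to compile with + // "No ClassTag available for Nothing"; the explicit Array[Byte]() below avoids this.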
+ e1.o10.getOrElse(Array[Byte]()) mustEqual e2.o10.getOrElse(Array[Byte]()) + e1.o11 mustEqual e2.o11 + e1.o12 mustEqual e2.o12 + e1.o13 mustEqual e2.o13 + e1.o14 mustEqual e2.o14 + e1.o15 mustEqual e2.o15 } } case class BarCode(description: String, uuid: Option[UUID] = None) val insertBarCode = quote((b: BarCode) => query[BarCode].insertValue(b).returningGenerated(_.uuid)) - val barCodeEntry = BarCode("returning UUID") + val barCodeEntry = BarCode("returning UUID") inline def findBarCodeByUuid(uuid: UUID) = quote(query[BarCode].filter(_.uuid.forall(_ == lift(uuid)))) diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/OptionQuerySpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/OptionQuerySpec.scala index b6a38fede..a658ec073 100644 --- a/quill-sql/src/test/scala/io/getquill/context/sql/OptionQuerySpec.scala +++ b/quill-sql/src/test/scala/io/getquill/context/sql/OptionQuerySpec.scala @@ -54,7 +54,9 @@ trait OptionQuerySpec extends Spec { ) inline def `Simple Map with Condition and GetOrElse` = quote { - query[Address].map(a => (a.street, a.otherExtraInfo.map(info => if (info == "something") "foo" else "bar").getOrElse("baz"))) + query[Address].map(a => + (a.street, a.otherExtraInfo.map(info => if (info == "something") "foo" else "bar").getOrElse("baz")) + ) } val `Simple Map with Condition and GetOrElse Result` = List( ("123 Fake Street", "foo"), @@ -74,8 +76,9 @@ trait OptionQuerySpec extends Spec { ) inline def `LeftJoin with FlatMap` = quote { - query[Contact].leftJoin(query[Address]).on((c, a) => c.addressFk.exists(_ == a.id)) - .map({ case (c, a) => (a.map(_.id), a.flatMap(_.otherExtraInfo)) }) + query[Contact].leftJoin(query[Address]).on((c, a) => c.addressFk.exists(_ == a.id)).map { case (c, a) => + (a.map(_.id), a.flatMap(_.otherExtraInfo)) + } } val `LeftJoin with FlatMap Result` = List( (Some(1), Some("something")), @@ -84,8 +87,9 @@ trait OptionQuerySpec extends Spec { ) inline def `LeftJoin with Flatten` = quote { - query[Contact].leftJoin(query[Address]).on((c, a) => c.addressFk.exists(_ == a.id)) - .map({ case (c, a) => (a.map(_.id), a.map(_.otherExtraInfo).flatten) }) + query[Contact].leftJoin(query[Address]).on((c, a) => c.addressFk.exists(_ == a.id)).map { case (c, a) => + (a.map(_.id), a.map(_.otherExtraInfo).flatten) + } } val `LeftJoin with Flatten Result` = List( (Some(1), Some("something")), @@ -94,8 +98,9 @@ trait OptionQuerySpec extends Spec { ) inline def `Map+getOrElse LeftJoin` = quote { - query[Contact].leftJoin(query[Address]).on((c, a) => c.addressFk.getOrElse(-1) == a.id) - .map({ case (c, a) => (a.map(_.id), a.flatMap(_.otherExtraInfo)) }) + query[Contact].leftJoin(query[Address]).on((c, a) => c.addressFk.getOrElse(-1) == a.id).map { case (c, a) => + (a.map(_.id), a.flatMap(_.otherExtraInfo)) + } } val `Map+getOrElse LeftJoin Result` = List( (Some(1), Some("something")), @@ -105,8 +110,8 @@ trait OptionQuerySpec extends Spec { case class NormalizedContact(name: String, addressFk: Option[Int]) - inline def normalizeAddress = quote { - (addressFk: Option[Int]) => addressFk.getOrElse(111) + inline def normalizeAddress = quote { (addressFk: Option[Int]) => + addressFk.getOrElse(111) } inline def `Option+Some+None Normalize` = quote { @@ -114,11 +119,11 @@ trait OptionQuerySpec extends Spec { val c2 = querySchema[HasAddressContact]("Contact").map(c => (c.firstName, Some(c.addressFk))) val c3 = query[Contact].map(c => (c.firstName, c.addressFk)) - val normalized = (c1 ++ c2 ++ c3).map({ case (name, address) => (name, 
normalizeAddress(address)) }) + val normalized = (c1 ++ c2 ++ c3).map { case (name, address) => (name, normalizeAddress(address)) } for { (name, addressFk) <- normalized - address <- query[Address] if address.id == addressFk + address <- query[Address] if address.id == addressFk } yield (name, address.street) } diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/PeopleAggregationSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/PeopleAggregationSpec.scala index 9536e55a5..8e784020b 100644 --- a/quill-sql/src/test/scala/io/getquill/context/sql/PeopleAggregationSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/context/sql/PeopleAggregationSpec.scala @@ -64,9 +64,16 @@ trait PeopleAggregationSpec extends Spec { Need to look into this */ inline def get = quote { - query[Contact].map(c => Name(c.firstName, c.lastName, c.age)).groupByMap(p => p.first)(p => (p.first, max(p.age))).filter(t => t._1 == "Joe") + query[Contact] + .map(c => Name(c.firstName, c.lastName, c.age)) + .groupByMap(p => p.first)(p => (p.first, max(p.age))) + .filter(t => t._1 == "Joe") } - val expect = people.groupBy(_.firstName).map { case (name, people) => (name, people.map(_.age).max) }.filter(_._1 == "Joe").toSet + val expect = people + .groupBy(_.firstName) + .map { case (name, people) => (name, people.map(_.age).max) } + .filter(_._1 == "Joe") + .toSet } object `Ex 6 flatMap.groupByMap.map` { @@ -78,11 +85,10 @@ trait PeopleAggregationSpec extends Spec { } val expect = { val addressMaxAges = people.groupBy(_.addressFk).map { case (address, q) => (address, q.map(_.age).max) } - val addressList = addresses.map(a => (a.id, a)).toMap - addressMaxAges - .map { case (addressId, maxAge) => (addressList.get(addressId), (addressId, maxAge)) } - .collect { case (Some(addr), tup) => (addr, tup) } - .toSet + val addressList = addresses.map(a => (a.id, a)).toMap + addressMaxAges.map { case (addressId, maxAge) => (addressList.get(addressId), (addressId, maxAge)) }.collect { + case (Some(addr), tup) => (addr, tup) + }.toSet } } } diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/PeopleReturningSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/PeopleReturningSpec.scala index 6f72bf6c3..9f710f9b6 100644 --- a/quill-sql/src/test/scala/io/getquill/context/sql/PeopleReturningSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/context/sql/PeopleReturningSpec.scala @@ -8,7 +8,13 @@ trait PeopleReturningSpec extends Spec with BeforeAndAfterEach { val context: SqlContext[_, _] import context._ - case class Contact(firstName: String, lastName: String, age: Int, addressFk: Int = 0, extraInfo: Option[String] = None) + case class Contact( + firstName: String, + lastName: String, + age: Int, + addressFk: Int = 0, + extraInfo: Option[String] = None + ) case class Product(id: Long, description: String, sku: Int) inline def peopleInsert = @@ -27,7 +33,7 @@ trait PeopleReturningSpec extends Spec with BeforeAndAfterEach { inline def op = quote { query[Product].insert(_.description -> lift(product.description), _.sku -> lift(product.sku)).returning(p => p.id) } - inline def get = quote { query[Product] } + inline def get = quote(query[Product]) def result(id: Long) = List(product.copy(id = id)) } @@ -35,15 +41,17 @@ trait PeopleReturningSpec extends Spec with BeforeAndAfterEach { inline def op = quote { query[Product].insert(_.description -> lift(product.description), _.sku -> lift(product.sku)).returning(p => p) } - inline def get = quote { query[Product] } + inline def get = quote(query[Product]) def 
result(newProduct: Product) = List(newProduct) } object `Ex 1 insert.returningMany(_.generatedColumn) mod` { inline def op = quote { - query[Product].insert(_.description -> lift(product.description), _.sku -> lift(product.sku)).returningMany(p => p.id) + query[Product] + .insert(_.description -> lift(product.description), _.sku -> lift(product.sku)) + .returningMany(p => p.id) } - inline def get = quote { query[Product] } + inline def get = quote(query[Product]) def result(id: Long) = List(product.copy(id = id)) } @@ -51,8 +59,8 @@ trait PeopleReturningSpec extends Spec with BeforeAndAfterEach { inline def op = quote { query[Contact].filter(p => p.firstName == "Joe").update(p => p.age -> (p.age + 1)).returningMany(p => p.lastName) } - val expect = people.filter(_.firstName == "Joe").map(_.lastName) - inline def get = quote { query[Contact] } + val expect = people.filter(_.firstName == "Joe").map(_.lastName) + inline def get = quote(query[Contact]) inline def result = people.map(p => if (p.firstName == "Joe") p.copy(age = p.age + 1) else p) } @@ -60,8 +68,8 @@ trait PeopleReturningSpec extends Spec with BeforeAndAfterEach { inline def op = quote { query[Contact].filter(p => p.firstName == "Joe").delete.returningMany(p => p) } - val expect = people.filter(p => p.firstName == "Joe") - inline def get = quote { query[Contact] } + val expect = people.filter(p => p.firstName == "Joe") + inline def get = quote(query[Contact]) inline def result = people.filterNot(p => p.firstName == "Joe") } @@ -70,10 +78,16 @@ trait PeopleReturningSpec extends Spec with BeforeAndAfterEach { query[Contact] .filter(p => p.firstName == "Joe") .update(p => p.age -> (p.age + 1)) - .returningMany(p => query[Contact].filter(cp => cp.firstName == p.firstName && cp.lastName == p.lastName).map(_.lastName).value.orNull) + .returningMany(p => + query[Contact] + .filter(cp => cp.firstName == p.firstName && cp.lastName == p.lastName) + .map(_.lastName) + .value + .orNull + ) } - val expect = List("A", "B") - inline def get = quote { query[Contact] } + val expect = List("A", "B") + inline def get = quote(query[Contact]) inline def result = people.map(p => if (p.firstName == "Joe") p.copy(age = p.age + 1) else p) } } diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/PeopleSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/PeopleSpec.scala index bc2922b3b..371d632fa 100644 --- a/quill-sql/src/test/scala/io/getquill/context/sql/PeopleSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/context/sql/PeopleSpec.scala @@ -59,73 +59,68 @@ trait PeopleSpec extends Spec { } val `Ex 1 expected result` = List(("Alex", 5), ("Cora", 2)) - inline def `Ex 2 rangeSimple` = quote { - (a: Int, b: Int) => - for { - u <- query[Person] if (a <= u.age && u.age < b) - } yield { - u - } + inline def `Ex 2 rangeSimple` = quote { (a: Int, b: Int) => + for { + u <- query[Person] if (a <= u.age && u.age < b) + } yield { + u + } } - val `Ex 2 param 1` = 30 - val `Ex 2 param 2` = 40 + val `Ex 2 param 1` = 30 + val `Ex 2 param 2` = 40 val `Ex 2 expected result` = List(Person("Cora", 33), Person("Drew", 31)) inline def satisfies = - quote { - (p: Int => Boolean) => - for { - u <- query[Person] if (p(u.age)) - } yield { - u - } + quote { (p: Int => Boolean) => + for { + u <- query[Person] if (p(u.age)) + } yield { + u + } } inline def `Ex 3 satisfies` = quote(satisfies((x: Int) => 20 <= x && x < 30)) - val `Ex 3 expected result` = List(Person("Edna", 21)) + val `Ex 3 expected result` = List(Person("Edna", 21)) inline def `Ex 4 satisfies` 
= quote(satisfies((x: Int) => x % 2 == 0)) - val `Ex 4 expected result` = List(Person("Alex", 60), Person("Fred", 60)) + val `Ex 4 expected result` = List(Person("Alex", 60), Person("Fred", 60)) // TODO this one has to be dynamic because you can't have nested inlines // should look into how to make a static equivalent val `Ex 5 compose` = { - val range = quote { - (a: Int, b: Int) => - for { - u <- query[Person] if (a <= u.age && u.age < b) - } yield { - u - } + val range = quote { (a: Int, b: Int) => + for { + u <- query[Person] if (a <= u.age && u.age < b) + } yield { + u + } } - val ageFromName = quote { - (s: String) => - for { - u <- query[Person] if (s == u.name) - } yield { - u.age - } + val ageFromName = quote { (s: String) => + for { + u <- query[Person] if (s == u.name) + } yield { + u.age + } } - quote { - (s: String, t: String) => - for { - a <- ageFromName(s) - b <- ageFromName(t) - r <- range(a, b) - } yield { - r - } + quote { (s: String, t: String) => + for { + a <- ageFromName(s) + b <- ageFromName(t) + r <- range(a, b) + } yield { + r + } } } - val `Ex 5 param 1` = "Drew" - val `Ex 5 param 2` = "Bert" + val `Ex 5 param 1` = "Drew" + val `Ex 5 param 2` = "Bert" val `Ex 5 expected result` = List(Person("Cora", 33), Person("Drew", 31)) sealed trait Predicate - case class Above(i: Int) extends Predicate - case class Below(i: Int) extends Predicate + case class Above(i: Int) extends Predicate + case class Below(i: Int) extends Predicate case class And(a: Predicate, b: Predicate) extends Predicate - case class Or(a: Predicate, b: Predicate) extends Predicate - case class Not(p: Predicate) extends Predicate + case class Or(a: Predicate, b: Predicate) extends Predicate + case class Not(p: Predicate) extends Predicate // TODO Leaving this dynamic for now. 
Should look into a static variant later def eval(t: Predicate): Quoted[Int => Boolean] = @@ -137,22 +132,21 @@ trait PeopleSpec extends Spec { case Not(t0) => quote((x: Int) => !eval(t0)(x)) } - val `Ex 6 predicate` = And(Above(30), Below(40)) + val `Ex 6 predicate` = And(Above(30), Below(40)) val `Ex 6 expected result` = List(Person("Cora", 33), Person("Drew", 31)) - val `Ex 7 predicate` = Not(Or(Below(20), Above(30))) + val `Ex 7 predicate` = Not(Or(Below(20), Above(30))) val `Ex 7 expected result` = List(Person("Edna", 21)) inline def `Ex 8 and 9 contains` = - quote { - (set: Query[Int]) => - query[Person].filter(p => set.contains(p.age)) + quote { (set: Query[Int]) => + query[Person].filter(p => set.contains(p.age)) } - val `Ex 8 param` = Set.empty[Int] + val `Ex 8 param` = Set.empty[Int] val `Ex 8 expected result` = List.empty[Person] - val `Ex 9 param` = Set(55, 33) + val `Ex 9 param` = Set(55, 33) val `Ex 9 expected result` = List(Person("Bert", 55), Person("Cora", 33)) inline def `Ex 10 page 1 query` = quote { diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/ProductSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/ProductSpec.scala index 9e1b727aa..b3093e462 100644 --- a/quill-sql/src/test/scala/io/getquill/context/sql/ProductSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/context/sql/ProductSpec.scala @@ -18,16 +18,16 @@ trait ProductSpec extends Spec { query[Product] } - inline def productInsert = quote { - (p: Product) => query[Product].insertValue(p).returningGenerated(_.id) + inline def productInsert = quote { (p: Product) => + query[Product].insertValue(p).returningGenerated(_.id) } - inline def productInsertBatch = quote { - (b: Query[Product]) => b.foreach(p => productInsert.apply(p)) + inline def productInsertBatch = quote { (b: Query[Product]) => + b.foreach(p => productInsert.apply(p)) } - inline def productById = quote { - (id: Long) => product.filter(_.id == id) + inline def productById = quote { (id: Long) => + product.filter(_.id == id) } val productEntries = List( diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/QueryResultTypeSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/QueryResultTypeSpec.scala index 815d7c3e8..213d1e12a 100644 --- a/quill-sql/src/test/scala/io/getquill/context/sql/QueryResultTypeSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/context/sql/QueryResultTypeSpec.scala @@ -7,25 +7,25 @@ trait QueryResultTypeSpec extends ProductSpec { import context._ - inline def deleteAll = quote(query[Product].delete) - inline def selectAll = quote(query[Product]) - inline def map = quote(query[Product].map(_.id)) - inline def filter = quote(query[Product].filter(_ => true)) + inline def deleteAll = quote(query[Product].delete) + inline def selectAll = quote(query[Product]) + inline def map = quote(query[Product].map(_.id)) + inline def filter = quote(query[Product].filter(_ => true)) inline def withFilter = quote(query[Product].withFilter(_ => true)) - inline def sortBy = quote(query[Product].sortBy(_.id)(Ord.asc)) - inline def take = quote(query[Product].take(10)) - inline def drop = quote(query[Product].drop(1)) - inline def `++` = quote(query[Product] ++ query[Product]) - inline def unionAll = quote(query[Product].unionAll(query[Product])) - inline def union = quote(query[Product].union(query[Product])) - - inline def minExists = quote(query[Product].map(_.sku).min) + inline def sortBy = quote(query[Product].sortBy(_.id)(Ord.asc)) + inline def take = quote(query[Product].take(10)) + inline def 
drop = quote(query[Product].drop(1)) + inline def `++` = quote(query[Product] ++ query[Product]) + inline def unionAll = quote(query[Product].unionAll(query[Product])) + inline def union = quote(query[Product].union(query[Product])) + + inline def minExists = quote(query[Product].map(_.sku).min) inline def minNonExists = quote(query[Product].filter(_.id > 1000).map(_.sku).min) - inline def maxExists = quote(query[Product].map(_.sku).max) + inline def maxExists = quote(query[Product].map(_.sku).max) inline def maxNonExists = quote(query[Product].filter(_.id > 1000).map(_.sku).max) - inline def avgExists = quote(query[Product].map(_.sku).avg) + inline def avgExists = quote(query[Product].map(_.sku).avg) inline def avgNonExists = quote(query[Product].filter(_.id > 1000).map(_.sku).avg) - inline def productSize = quote(query[Product].size) + inline def productSize = quote(query[Product].size) inline def parametrizedSize = quote { (id: Long) => query[Product].filter(_.id == id).size } @@ -33,7 +33,7 @@ trait QueryResultTypeSpec extends ProductSpec { inline def join = quote(query[Product].join(query[Product]).on(_.id == _.id)) inline def nonEmpty = quote(query[Product].nonEmpty) - inline def isEmpty = quote(query[Product].isEmpty) + inline def isEmpty = quote(query[Product].isEmpty) inline def distinct = quote(query[Product].map(_.id).distinct) } diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/Scala3FeaturesSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/Scala3FeaturesSpec.scala index 4b882bd59..c2f377979 100644 --- a/quill-sql/src/test/scala/io/getquill/context/sql/Scala3FeaturesSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/context/sql/Scala3FeaturesSpec.scala @@ -28,12 +28,12 @@ class Scala3FeaturesSpec extends Spec { sealed trait Filter object Filter { - case class ByName(name: String) extends Filter + case class ByName(name: String) extends Filter case class ByAge(from: Int, to: Int) extends Filter } enum FilterEnum { - case ByName(name: String) extends FilterEnum + case ByName(name: String) extends FilterEnum case ByAge(from: Int, to: Int) extends FilterEnum } diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/TestContextTemplate.scala b/quill-sql/src/test/scala/io/getquill/context/sql/TestContextTemplate.scala index 28159438a..1db4e4af4 100644 --- a/quill-sql/src/test/scala/io/getquill/context/sql/TestContextTemplate.scala +++ b/quill-sql/src/test/scala/io/getquill/context/sql/TestContextTemplate.scala @@ -27,8 +27,8 @@ class TestContextTemplate[+Dialect <: SqlIdiom, +Naming <: NamingStrategy](diale trait UpperCaseNonDefault extends NamingStrategy { override def column(s: String): String = s.toUpperCase - override def table(s: String): String = s.toUpperCase - override def default(s: String) = s + override def table(s: String): String = s.toUpperCase + override def default(s: String) = s } //object UpperCaseNonDefault extends getquill.UpperCaseNonDefault diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/TestDecoders.scala b/quill-sql/src/test/scala/io/getquill/context/sql/TestDecoders.scala index b9fa483bf..f6ee1c133 100644 --- a/quill-sql/src/test/scala/io/getquill/context/sql/TestDecoders.scala +++ b/quill-sql/src/test/scala/io/getquill/context/sql/TestDecoders.scala @@ -19,7 +19,8 @@ trait TestDecoders { MappedEncoding[String, EncodingTestType].apply(EncodingTestType.apply) implicit val nameDecoder: MappedEncoding[String, Number] = MappedEncoding[String, Number].apply(s => - Number.withValidation(s) + Number + .withValidation(s) 
.getOrElse(throw new Exception(s"Illegal number $s")) ) } diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/TestEncoders.scala b/quill-sql/src/test/scala/io/getquill/context/sql/TestEncoders.scala index 9426062f6..c17bf1ef3 100644 --- a/quill-sql/src/test/scala/io/getquill/context/sql/TestEncoders.scala +++ b/quill-sql/src/test/scala/io/getquill/context/sql/TestEncoders.scala @@ -4,6 +4,7 @@ import io.getquill.MappedEncoding trait TestEncoders { // In Dotty implicit vals need to be typed so needed to add type annotations here that are not present in Scala2-Quill - implicit val encodingTestTypeEncoder: MappedEncoding[EncodingTestType, String] = MappedEncoding[EncodingTestType, String](_.value) + implicit val encodingTestTypeEncoder: MappedEncoding[EncodingTestType, String] = + MappedEncoding[EncodingTestType, String](_.value) implicit val nameEncoder: MappedEncoding[Number, String] = MappedEncoding[Number, String](_.value) } diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/base/BatchUpdateValuesSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/base/BatchUpdateValuesSpec.scala index 0b1e5cbe9..34dfa9e22 100644 --- a/quill-sql/src/test/scala/io/getquill/context/sql/base/BatchUpdateValuesSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/context/sql/base/BatchUpdateValuesSpec.scala @@ -23,8 +23,8 @@ trait BatchUpdateValuesSpec extends Spec with BeforeAndAfterEach { ContactBase("Caboose", "Castle", 66), ContactBase("E", "E", 111) ) - val updatePeople = List("Joe", "Jan", "James", "Dale", "Caboose") - def includeInUpdate(name: String): Boolean = updatePeople.contains(name) + val updatePeople = List("Joe", "Jan", "James", "Dale", "Caboose") + def includeInUpdate(name: String): Boolean = updatePeople.contains(name) def includeInUpdate(c: ContactBase): Boolean = includeInUpdate(c.firstName) val updateBase = dataBase.filter(includeInUpdate(_)).map(r => r.copy(lastName = r.lastName + "U")) @@ -39,8 +39,8 @@ trait BatchUpdateValuesSpec extends Spec with BeforeAndAfterEach { def adapt: List[Row] = list.map(makeData(_)) } lazy val updateData = updateBase.adapt - lazy val expect = expectBase.adapt - lazy val data = dataBase.adapt + lazy val expect = expectBase.adapt + lazy val data = dataBase.adapt } object `Ex 1 - Simple Contact` extends Adaptable { @@ -72,7 +72,7 @@ trait BatchUpdateValuesSpec extends Spec with BeforeAndAfterEach { query[Contact].filter(p => p.firstName == ps.firstName && p.firstName == lift("Joe")).updateValue(ps) ) } - inline def get = quote(query[Contact]) + inline def get = quote(query[Contact]) override lazy val expect = data.map(p => if (p.firstName == "Joe") p.copy(lastName = p.lastName + "U") else p) } @@ -92,7 +92,8 @@ trait BatchUpdateValuesSpec extends Spec with BeforeAndAfterEach { ) } inline def get = quote(query[Contact]) - override lazy val expect = data.map(p => if (p.firstName == "Joe" || p.firstName == "Jan") p.copy(lastName = p.lastName + "U Jr.") else p) + override lazy val expect = + data.map(p => if (p.firstName == "Joe" || p.firstName == "Jan") p.copy(lastName = p.lastName + "U Jr.") else p) } object `Ex 1.3 - Simple Contact with Multi-Lift-Kinds` extends Adaptable { @@ -106,12 +107,18 @@ trait BatchUpdateValuesSpec extends Spec with BeforeAndAfterEach { inline def update = quote { liftQuery(updateData: List[Contact]).foreach(ps => query[Contact] - .filter(p => p.firstName == ps.firstName && (p.firstName == lift("Joe") || liftQuery(List("Dale", "Caboose")).contains(p.firstName))) + .filter(p => + p.firstName == ps.firstName 
&& (p.firstName == lift("Joe") || liftQuery(List("Dale", "Caboose")) + .contains(p.firstName)) + ) .updateValue(ps) ) } inline def get = quote(query[Contact]) - override lazy val expect = data.map(p => if (p.firstName == "Joe" || p.firstName == "Dale" || p.firstName == "Caboose") p.copy(lastName = p.lastName + "U") else p) + override lazy val expect = data.map(p => + if (p.firstName == "Joe" || p.firstName == "Dale" || p.firstName == "Caboose") p.copy(lastName = p.lastName + "U") + else p + ) } object `Ex 2 - Optional Embedded with Renames` extends Adaptable { @@ -139,12 +146,13 @@ trait BatchUpdateValuesSpec extends Spec with BeforeAndAfterEach { } object `Ex 3 - Deep Embedded Optional` extends Adaptable { - case class FirstName(firstName: Option[String]) extends Embedded - case class LastName(lastName: Option[String]) extends Embedded + case class FirstName(firstName: Option[String]) extends Embedded + case class LastName(lastName: Option[String]) extends Embedded case class Name(first: FirstName, last: LastName) extends Embedded case class Contact(name: Option[Name], age: Int) type Row = Contact - override def makeData(c: ContactBase): Contact = Contact(Some(Name(FirstName(Option(c.firstName)), LastName(Option(c.lastName)))), c.age) + override def makeData(c: ContactBase): Contact = + Contact(Some(Name(FirstName(Option(c.firstName)), LastName(Option(c.lastName)))), c.age) inline def insert = quote { liftQuery(data: List[Contact]).foreach(ps => query[Contact].insertValue(ps)) @@ -173,7 +181,7 @@ trait BatchUpdateValuesSpec extends Spec with BeforeAndAfterEach { ) } val expectedReturn = updateData.map(_.age) - inline def get = quote(query[Contact]) + inline def get = quote(query[Contact]) } object `Ex 4 - Returning Multiple` extends Adaptable { @@ -190,7 +198,7 @@ trait BatchUpdateValuesSpec extends Spec with BeforeAndAfterEach { ) } val expectedReturn = updateData.map(r => (r.lastName, r.age)) - inline def get = quote(query[Contact]) + inline def get = quote(query[Contact]) } object `Ex 5 - Append Data` extends Adaptable { @@ -212,15 +220,14 @@ trait BatchUpdateValuesSpec extends Spec with BeforeAndAfterEach { ) } - val expectSpecific = (data: List[Contact]) - .map(r => { - if (includeInUpdate(r.firstName)) { - // Not sure why the 2nd part i.e. _AA, _BB is not tacked on yet. Something odd about how postgres processes updates - // Note that this happens even with a batch-group-size of 1 - r.copy(firstName = s"${r.firstName}_A", lastName = s"${r.lastName}_B") - } else - r - }) + val expectSpecific = (data: List[Contact]).map { r => + if (includeInUpdate(r.firstName)) { + // Not sure why the 2nd part i.e. _AA, _BB is not tacked on yet. Something odd about how postgres processes updates + // Note that this happens even with a batch-group-size of 1 + r.copy(firstName = s"${r.firstName}_A", lastName = s"${r.lastName}_B") + } else + r + } inline def get = quote(query[Contact]) } @@ -237,13 +244,15 @@ trait BatchUpdateValuesSpec extends Spec with BeforeAndAfterEach { ) inline def update = quote { liftQuery(updateDataSpecific: List[Contact]).foreach(ps => - query[Contact].update(pa => pa.firstName -> (pa.firstName + ps.firstName), pb => pb.lastName -> (pb.lastName + ps.lastName)) + query[Contact] + .update(pa => pa.firstName -> (pa.firstName + ps.firstName), pb => pb.lastName -> (pb.lastName + ps.lastName)) ) } // Not sure why the 2nd part i.e. _AA, _BB is not tacked on yet.
Something odd about how postgres processes updates // Note that this happens even with a batch-group-size of 1 - val expectSpecific = (data: List[Contact]).map(r => r.copy(firstName = s"${r.firstName}_A", lastName = s"${r.lastName}_B")) + val expectSpecific = + (data: List[Contact]).map(r => r.copy(firstName = s"${r.firstName}_A", lastName = s"${r.lastName}_B")) inline def get = quote(query[Contact]) } } diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/encoding/ArrayEncodingBaseSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/encoding/ArrayEncodingBaseSpec.scala index 3cd8a82e8..d6d78e985 100644 --- a/quill-sql/src/test/scala/io/getquill/context/sql/encoding/ArrayEncodingBaseSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/context/sql/encoding/ArrayEncodingBaseSpec.scala @@ -11,18 +11,18 @@ trait ArrayEncodingBaseSpec extends Spec with BeforeAndAfterEach { // Support all sql base types and `Seq` implementers case class ArraysTestEntity( - texts: List[String], - decimals: Seq[BigDecimal], - bools: Vector[Boolean], - bytes: List[Byte], - shorts: IndexedSeq[Short], - ints: Seq[Int], - longs: Seq[Long], - floats: Seq[Float], - doubles: Seq[Double], - timestamps: Seq[Date], - dates: Seq[LocalDate], - uuids: Seq[UUID] + texts: List[String], + decimals: Seq[BigDecimal], + bools: Vector[Boolean], + bytes: List[Byte], + shorts: IndexedSeq[Short], + ints: Seq[Int], + longs: Seq[Long], + floats: Seq[Float], + doubles: Seq[Double], + timestamps: Seq[Date], + dates: Seq[LocalDate], + uuids: Seq[UUID] ) val e = ArraysTestEntity( diff --git a/quill-sql/src/test/scala/io/getquill/context/sql/encoding/ArrayEncodingSpec.scala b/quill-sql/src/test/scala/io/getquill/context/sql/encoding/ArrayEncodingSpec.scala index cde313bb3..cf22e323d 100644 --- a/quill-sql/src/test/scala/io/getquill/context/sql/encoding/ArrayEncodingSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/context/sql/encoding/ArrayEncodingSpec.scala @@ -15,8 +15,8 @@ class ArrayEncodingSpec extends Spec { object impl { implicit def encodeRaw[Col <: Seq[Raw]]: Encoder[Col] = encoder[Col] implicit def decodeRaw[Col <: Seq[Raw]]: Decoder[Col] = decoderUnsafe[Col] - implicit val encodeDecor: MappedEncoding[Decor, Raw] = MappedEncoding(_.raw) - implicit val decodeDecor: MappedEncoding[Raw, Decor] = MappedEncoding(Decor.apply) + implicit val encodeDecor: MappedEncoding[Decor, Raw] = MappedEncoding(_.raw) + implicit val decodeDecor: MappedEncoding[Raw, Decor] = MappedEncoding(Decor.apply) } "Provide array support with MappingEncoding" - { diff --git a/quill-sql/src/test/scala/io/getquill/customparser/CustomParser.scala b/quill-sql/src/test/scala/io/getquill/customparser/CustomParser.scala index bcac3188a..35b69b965 100644 --- a/quill-sql/src/test/scala/io/getquill/customparser/CustomParser.scala +++ b/quill-sql/src/test/scala/io/getquill/customparser/CustomParser.scala @@ -25,14 +25,13 @@ object CustomParser extends ParserLibrary { class CustomOperationsParser(rootParse: Parser)(using Quotes) extends Parser(rootParse) { import quotes.reflect._ import CustomOps._ - def attempt = { - case '{ ($i: Int) ** ($j: Int) } => - Infix( - List("power(", " ,", ")"), - List(rootParse(i), rootParse(j)), - true, - false, - Quat.Value - ) + def attempt = { case '{ ($i: Int) ** ($j: Int) } => + Infix( + List("power(", " ,", ")"), + List(rootParse(i), rootParse(j)), + true, + false, + Quat.Value + ) } } diff --git a/quill-sql/src/test/scala/io/getquill/idiom/LoadNamingTest.scala 
b/quill-sql/src/test/scala/io/getquill/idiom/LoadNamingTest.scala index 22dd4c087..69ab72a71 100644 --- a/quill-sql/src/test/scala/io/getquill/idiom/LoadNamingTest.scala +++ b/quill-sql/src/test/scala/io/getquill/idiom/LoadNamingTest.scala @@ -14,13 +14,11 @@ import io.getquill._ LoadNaming.mac(comp) } -@main def testLoadNaming() = { +@main def testLoadNaming() = println(macLoadNamingStrategy[Literal](Literal)) -} -@main def testLoadNamingInferred() = { +@main def testLoadNamingInferred() = println(macLoadNamingStrategy(Literal)) -} @main def testLoadNamingMulti() = { val comp = NamingStrategy(SnakeCase, UpperCase) diff --git a/quill-sql/src/test/scala/io/getquill/metaprog/StaticSpliceSpec.scala b/quill-sql/src/test/scala/io/getquill/metaprog/StaticSpliceSpec.scala index 1152a6759..fc8d3b76c 100644 --- a/quill-sql/src/test/scala/io/getquill/metaprog/StaticSpliceSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/metaprog/StaticSpliceSpec.scala @@ -10,27 +10,27 @@ class StaticSpliceSpec extends Spec { case class Person(name: String, age: Int) "simple string splice should work" in { - ctx.run { query[Person].filter(p => p.name == static(Mod.modVal)) }.string mustEqual + ctx.run(query[Person].filter(p => p.name == static(Mod.modVal))).string mustEqual "SELECT p.name, p.age FROM Person p WHERE p.name = 'modValValue'" - ctx.run { query[Person].filter(p => p.age == static(Mod.modIntVal)) }.string mustEqual + ctx.run(query[Person].filter(p => p.age == static(Mod.modIntVal))).string mustEqual "SELECT p.name, p.age FROM Person p WHERE p.age = 123" - ctx.run { query[Person].filter(p => p.name == static(Mod.modDef)) }.string mustEqual + ctx.run(query[Person].filter(p => p.name == static(Mod.modDef))).string mustEqual "SELECT p.name, p.age FROM Person p WHERE p.name = 'modDefValue'" - ctx.run { query[Person].filter(p => p.name == static(Mod.modAp())) }.string mustEqual + ctx.run(query[Person].filter(p => p.name == static(Mod.modAp()))).string mustEqual "SELECT p.name, p.age FROM Person p WHERE p.name = 'modApValue'" - ctx.run { query[Person].filter(p => p.name == static(Mod.Foo.fooVal)) }.string mustEqual + ctx.run(query[Person].filter(p => p.name == static(Mod.Foo.fooVal))).string mustEqual "SELECT p.name, p.age FROM Person p WHERE p.name = 'fooValValue'" - ctx.run { query[Person].filter(p => p.name == static(Mod.Foo.fooDef)) }.string mustEqual + ctx.run(query[Person].filter(p => p.name == static(Mod.Foo.fooDef))).string mustEqual "SELECT p.name, p.age FROM Person p WHERE p.name = 'fooDefValue'" - ctx.run { query[Person].filter(p => p.name == static(Mod.Foo.fooAp())) }.string mustEqual + ctx.run(query[Person].filter(p => p.name == static(Mod.Foo.fooAp()))).string mustEqual "SELECT p.name, p.age FROM Person p WHERE p.name = 'fooApValue'" - ctx.run { query[Person].filter(p => p.name == static(Mod.Foo.Bar.barVal)) }.string mustEqual + ctx.run(query[Person].filter(p => p.name == static(Mod.Foo.Bar.barVal))).string mustEqual "SELECT p.name, p.age FROM Person p WHERE p.name = 'barValValue'" - ctx.run { query[Person].filter(p => p.name == static(Mod.Foo.Bar.barDef)) }.string mustEqual + ctx.run(query[Person].filter(p => p.name == static(Mod.Foo.Bar.barDef))).string mustEqual "SELECT p.name, p.age FROM Person p WHERE p.name = 'barDefValue'" - ctx.run { query[Person].filter(p => p.name == static(Mod.Foo.Bar.barAp())) }.string mustEqual + ctx.run(query[Person].filter(p => p.name == static(Mod.Foo.Bar.barAp()))).string mustEqual "SELECT p.name, p.age FROM Person p WHERE p.name = 'barApValue'" } } diff --git 
a/quill-sql/src/test/scala/io/getquill/parser/BooAstSerializerSpec.scala b/quill-sql/src/test/scala/io/getquill/parser/BooAstSerializerSpec.scala index d43ca5a3e..ce3555b47 100644 --- a/quill-sql/src/test/scala/io/getquill/parser/BooAstSerializerSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/parser/BooAstSerializerSpec.scala @@ -15,7 +15,8 @@ class BooSerializerSpec extends Spec { } "entity" in { - val v = new Entity("Person", List())(Quat.Product("Prod", "name" -> Quat.Value, "age" -> Quat.Value))(Renameable.neutral) + val v = + new Entity("Person", List())(Quat.Product("Prod", "name" -> Quat.Value, "age" -> Quat.Value))(Renameable.neutral) assert(repickle(v) == v) } @@ -38,22 +39,28 @@ class BooSerializerSpec extends Spec { } "insert" in { - val QV = Quat.Value - val QBV = Quat.BooleanValue + val QV = Quat.Value + val QBV = Quat.BooleanValue val TestEntityQuat = Quat.Product("Prod", "s" -> QV, "i" -> QV, "l" -> QV, "o" -> QV, "b" -> QBV) - val v = Insert(Entity("TestEntity", Nil, TestEntityQuat), List(Assignment(Ident("t"), Property(Ident("t"), "s"), Constant.auto("s")))) + val v = Insert( + Entity("TestEntity", Nil, TestEntityQuat), + List(Assignment(Ident("t"), Property(Ident("t"), "s"), Constant.auto("s"))) + ) assert(repickle(v) == v) } "onConflict" in { import io.getquill.ast._ - val QV = Quat.Value - val QBV = Quat.BooleanValue - val TestEntityQuat = Quat.Product("Prod", "s" -> QV, "i" -> QV, "l" -> QV, "o" -> QV, "b" -> QBV) + val QV = Quat.Value + val QBV = Quat.BooleanValue + val TestEntityQuat = Quat.Product("Prod", "s" -> QV, "i" -> QV, "l" -> QV, "o" -> QV, "b" -> QBV) def IdT(name: String) = Ident(name, TestEntityQuat) val v = OnConflict( - Insert(Entity("TestEntity", Nil, TestEntityQuat), List(Assignment(Ident("t"), Property(Ident("t"), "s"), Constant.auto("s")))), + Insert( + Entity("TestEntity", Nil, TestEntityQuat), + List(Assignment(Ident("t"), Property(Ident("t"), "s"), Constant.auto("s"))) + ), OnConflict.NoTarget, OnConflict.Update( List( @@ -88,10 +95,10 @@ class BooSerializerSpec extends Spec { } "sortByWithEntity" in { - val QV = Quat.Value - val QBV = Quat.BooleanValue - val TestEntityQuat = Quat.Product("Prod", "s" -> QV, "i" -> QV, "l" -> QV, "o" -> QV, "b" -> QBV) - def IdT(name: String) = Ident(name, TestEntityQuat) + val QV = Quat.Value + val QBV = Quat.BooleanValue + val TestEntityQuat = Quat.Product("Prod", "s" -> QV, "i" -> QV, "l" -> QV, "o" -> QV, "b" -> QBV) + def IdT(name: String) = Ident(name, TestEntityQuat) def PropT(id: String, prop: String) = Property(Ident(id, TestEntityQuat), prop) val v = diff --git a/quill-sql/src/test/scala/io/getquill/quat/QuatSpec.scala b/quill-sql/src/test/scala/io/getquill/quat/QuatSpec.scala index 36d959603..8980fe3b0 100644 --- a/quill-sql/src/test/scala/io/getquill/quat/QuatSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/quat/QuatSpec.scala @@ -39,13 +39,22 @@ class QuatSpec extends AnyFreeSpec { // quote(query[TestEnum.MyEnumContainer]) // } "should succeed product-type enum" in { - quatOf[TestEnum.ProductEnum] mustEqual Quat.Product("ProductEnum", "stuff" -> Quat.Value, "otherStuff" -> Quat.Value) + quatOf[TestEnum.ProductEnum] mustEqual Quat.Product( + "ProductEnum", + "stuff" -> Quat.Value, + "otherStuff" -> Quat.Value + ) } } "boolean and optional boolean" in { case class MyPerson(name: String, isHuman: Boolean, isRussian: Option[Boolean]) - val MyPersonQuat = Quat.Product("MyPersonQuat", "name" -> Quat.Value, "isHuman" -> Quat.BooleanValue, "isRussian" -> Quat.BooleanValue) + val MyPersonQuat = 
Quat.Product( + "MyPersonQuat", + "name" -> Quat.Value, + "isHuman" -> Quat.BooleanValue, + "isRussian" -> Quat.BooleanValue + ) MyPersonQuat mustEqual quatOf[MyPerson] quote(query[MyPerson]).ast.quat mustEqual MyPersonQuat } @@ -59,24 +68,29 @@ class QuatSpec extends AnyFreeSpec { "should support embedded" in { case class MyName(first: String, last: String) extends Embedded - case class MyPerson(name: MyName, age: Int) extends Embedded - val MyPersonQuat = Quat.Product("MyPersonQuat", "name" -> Quat.LeafProduct("MyName", "first", "last"), "age" -> Quat.Value) + case class MyPerson(name: MyName, age: Int) extends Embedded + val MyPersonQuat = + Quat.Product("MyPersonQuat", "name" -> Quat.LeafProduct("MyName", "first", "last"), "age" -> Quat.Value) MyPersonQuat mustEqual quatOf[MyPerson] quote(query[MyPerson]).ast.quat mustEqual MyPersonQuat } "should support multi-level embedded" in { case class MyName(first: String, last: String) extends Embedded - case class MyId(name: MyName, memberNum: Int) extends Embedded + case class MyId(name: MyName, memberNum: Int) extends Embedded case class MyPerson(name: MyId, age: Int) - val MyPersonQuat = Quat.Product("MyPersonQuat", "name" -> Quat.Product("Prod", "name" -> Quat.LeafProduct("MyName", "first", "last"), "memberNum" -> Quat.Value), "age" -> Quat.Value) + val MyPersonQuat = Quat.Product( + "MyPersonQuat", + "name" -> Quat.Product("Prod", "name" -> Quat.LeafProduct("MyName", "first", "last"), "memberNum" -> Quat.Value), + "age" -> Quat.Value + ) MyPersonQuat mustEqual quatOf[MyPerson] quote(query[MyPerson]).ast.quat mustEqual MyPersonQuat } "should support least upper types" - { val AnimalQuat = Quat.LeafProduct("Animal", "name") - val CatQuat = Quat.LeafProduct("Cat", "name", "color") + val CatQuat = Quat.LeafProduct("Cat", "name", "color") "simple reduction" in { AnimalQuat.leastUpperType(CatQuat).get mustEqual AnimalQuat @@ -87,8 +101,8 @@ class QuatSpec extends AnyFreeSpec { trait Animal { def name: String } case class Cat(name: String, color: Int) extends Animal - inline def isSpot[A <: Animal] = quote { - (animals: Query[A]) => animals.filter(a => a.name == "Spot") + inline def isSpot[A <: Animal] = quote { (animals: Query[A]) => + animals.filter(a => a.name == "Spot") } quote(isSpot[Cat](query[Cat])).ast.quat mustEqual CatQuat @@ -103,8 +117,8 @@ class QuatSpec extends AnyFreeSpec { } "lookup" - { - val bar = Quat.Product("bar", "baz" -> Quat.Value) - val foo = Quat.Product("foo", "v" -> Quat.Value, "bar" -> bar) + val bar = Quat.Product("bar", "baz" -> Quat.Value) + val foo = Quat.Product("foo", "v" -> Quat.Value, "bar" -> bar) val example = Quat.Product("example", "v" -> Quat.Value, "foo" -> foo) "path" in { example.lookup("foo", true) mustEqual foo @@ -128,15 +142,15 @@ class QuatSpec extends AnyFreeSpec { "Prod", "bv" -> Quat.BooleanValue, "be" -> Quat.BooleanExpression, - "v" -> Quat.Value, - "p" -> Quat.Product("Emb", "vv" -> Quat.Value, "pp" -> Quat.Product("EmbSingle", "ppp" -> Quat.Value)) + "v" -> Quat.Value, + "p" -> Quat.Product("Emb", "vv" -> Quat.Value, "pp" -> Quat.Product("EmbSingle", "ppp" -> Quat.Value)) ) val expect = Quat.Product( "expect", "bva" -> Quat.BooleanValue, - "be" -> Quat.BooleanExpression, - "v" -> Quat.Value, - "pa" -> Quat.Product("Emb", "vv" -> Quat.Value, "pp" -> Quat.Product("EmbSingle", "ppp" -> Quat.Value)) + "be" -> Quat.BooleanExpression, + "v" -> Quat.Value, + "pa" -> Quat.Product("Emb", "vv" -> Quat.Value, "pp" -> Quat.Product("EmbSingle", "ppp" -> Quat.Value)) ) val value = Quat.Value "rename 
field" in { @@ -150,7 +164,13 @@ class QuatSpec extends AnyFreeSpec { "should serialize" - { // Need to import implicits from BooQuatSerializer otherwise c_jl_UnsupportedOperationException happens in JS import BooQuatSerializer._ - val example = Quat.Product("Prod", "bv" -> Quat.BooleanValue, "be" -> Quat.BooleanExpression, "v" -> Quat.Value, "p" -> Quat.Product("EmbSingle", "vv" -> Quat.Value)) + val example = Quat.Product( + "Prod", + "bv" -> Quat.BooleanValue, + "be" -> Quat.BooleanExpression, + "v" -> Quat.Value, + "p" -> Quat.Product("EmbSingle", "vv" -> Quat.Value) + ) "with boo" in { Quat.fromSerialized(serialize(example)) mustEqual example } @@ -160,21 +180,21 @@ class QuatSpec extends AnyFreeSpec { "should support types in Query[T] position" - { "boolean value" in { // TODO Does the non-inline version of this work - inline def func = quote { - (q: Query[Boolean]) => q.filter(p => p == true) + inline def func = quote { (q: Query[Boolean]) => + q.filter(p => p == true) } func.ast.quat mustEqual Quat.BooleanValue } "boolean value - type" in { type Bool = Boolean - def func = quote { - (q: Query[Bool]) => q.filter(p => p == true) + def func = quote { (q: Query[Bool]) => + q.filter(p => p == true) } func.ast.quat mustEqual Quat.BooleanValue } "value" in { - def func = quote { - (q: Query[Int]) => q.filter(p => p == 1) + def func = quote { (q: Query[Int]) => + q.filter(p => p == 1) } func.ast.quat mustEqual Quat.Value } @@ -200,64 +220,64 @@ class QuatSpec extends AnyFreeSpec { */ "case class" in { case class MyPerson(name: String, isRussian: Boolean) - def func = quote { - (q: Query[MyPerson]) => q.filter(p => p.name == "Joe") + def func = quote { (q: Query[MyPerson]) => + q.filter(p => p.name == "Joe") } func.ast.quat mustEqual Quat.Product("Prod", "name" -> Quat.Value, "isRussian" -> Quat.BooleanValue) } "case class with boundary" in { case class MyPerson(name: String, isRussian: Boolean) - def func[T <: MyPerson] = quote { - (q: Query[T]) => q.filter(p => p.name == "Joe") + def func[T <: MyPerson] = quote { (q: Query[T]) => + q.filter(p => p.name == "Joe") } func.ast.quat mustEqual Quat.Generic } "interface" in { trait LikePerson { def name: String; def isRussian: Boolean } - def func = quote { - (q: Query[LikePerson]) => q.filter(p => p.name == "Joe") + def func = quote { (q: Query[LikePerson]) => + q.filter(p => p.name == "Joe") } // TODO What about abstract classes? What does Flags.Abstract do? 
func.ast.quat mustEqual Quat.Generic // helloooo } "interface with boundary" in { trait LikePerson { def name: String; def isRussian: Boolean } - def func[T <: LikePerson] = quote { - (q: Query[T]) => q.filter(p => p.name == "Joe") + def func[T <: LikePerson] = quote { (q: Query[T]) => + q.filter(p => p.name == "Joe") } func.ast.quat mustEqual Quat.Generic } "interface with boundary boolean indirect" in { type Bool = Boolean trait LikePerson { def name: String; def isRussian: Bool } - def func[T <: LikePerson] = quote { - (q: Query[T]) => q.filter(p => p.name == "Joe") + def func[T <: LikePerson] = quote { (q: Query[T]) => + q.filter(p => p.name == "Joe") } func.ast.quat mustEqual Quat.Generic } "boundary with value" in { - def func[T <: Int] = quote { - (q: Query[T]) => q + def func[T <: Int] = quote { (q: Query[T]) => + q } func.ast.quat mustEqual Quat.Value } "boundary with value - boolean" in { - def func[T <: Boolean] = quote { - (q: Query[T]) => q + def func[T <: Boolean] = quote { (q: Query[T]) => + q } func.ast.quat mustEqual Quat.BooleanValue } "boundary with value and type - boolean" in { type Bool = Boolean - def func[T <: Bool] = quote { - (q: Query[T]) => q + def func[T <: Bool] = quote { (q: Query[T]) => + q } func.ast.quat mustEqual Quat.BooleanValue } "any" in { // TODO Non Inline Version also once QuotationTag issue fixed - def func = quote { - (q: Query[Any]) => q + def func = quote { (q: Query[Any]) => + q } func.ast.quat mustEqual Quat.Generic } @@ -265,21 +285,21 @@ class QuatSpec extends AnyFreeSpec { "should support types" - { "boolean value" in { - def func = quote { - (q: Boolean) => q + def func = quote { (q: Boolean) => + q } func.ast.quat mustEqual Quat.BooleanValue } "boolean value - type" in { type Bool = Boolean - def func = quote { - (q: Bool) => q + def func = quote { (q: Bool) => + q } func.ast.quat mustEqual Quat.BooleanValue } "value" in { - def func = quote { - (q: Int) => q + def func = quote { (q: Int) => + q } func.ast.quat mustEqual Quat.Value } @@ -302,8 +322,8 @@ class QuatSpec extends AnyFreeSpec { } "case class" in { case class MyPerson(name: String, isRussian: Boolean) - def func = quote { - (q: MyPerson) => q + def func = quote { (q: MyPerson) => + q } func.ast.quat mustEqual Quat.Product("Prod", "name" -> Quat.Value, "isRussian" -> Quat.BooleanValue) } @@ -317,8 +337,8 @@ class QuatSpec extends AnyFreeSpec { } "interface" in { trait LikePerson { def name: String; def isRussian: Boolean } - def func = quote { - (q: LikePerson) => q + def func = quote { (q: LikePerson) => + q } func.ast.quat mustEqual Quat.Generic } @@ -340,27 +360,27 @@ class QuatSpec extends AnyFreeSpec { // func.ast.quat mustEqual Quat.Product("Prod", "name" -> Quat.Value, "isRussian" -> Quat.BooleanValue) } "boundary with value" in { - def func[T <: Int] = quote { - (q: T) => q + def func[T <: Int] = quote { (q: T) => + q } func.ast.quat mustEqual Quat.Value } "boundary with value - boolean" in { - def func[T <: Boolean] = quote { - (q: T) => q + def func[T <: Boolean] = quote { (q: T) => + q } func.ast.quat mustEqual Quat.BooleanValue } "boundary with value and type - boolean" in { type Bool = Boolean - def func[T <: Bool] = quote { - (q: T) => q + def func[T <: Bool] = quote { (q: T) => + q } func.ast.quat mustEqual Quat.BooleanValue } "any" in { - def func = quote { - (q: Any) => q + def func = quote { (q: Any) => + q } func.ast.quat mustEqual Quat.Generic } diff --git a/quill-sql/src/test/scala/io/getquill/sanity/SimpleBatchWithInfix.scala 
b/quill-sql/src/test/scala/io/getquill/sanity/SimpleBatchWithInfix.scala index fe8a2be2f..b2ff36743 100644 --- a/quill-sql/src/test/scala/io/getquill/sanity/SimpleBatchWithInfix.scala +++ b/quill-sql/src/test/scala/io/getquill/sanity/SimpleBatchWithInfix.scala @@ -16,9 +16,7 @@ object SimpleBatchWithInfix extends Spec { case class Person[T](name: String, age: Int) val names = List("Joe", "Jack") inline def q = quote { - query[Person[String]].filter(p => - liftQuery(names).contains(p.name) && sql"fun(${p.name})".pure.as[Boolean] - ) + query[Person[String]].filter(p => liftQuery(names).contains(p.name) && sql"fun(${p.name})".pure.as[Boolean]) } ctx.run(q).triple mustEqual ( "SELECT p.name, p.age FROM Person p WHERE p.name IN (?) AND fun(p.name)", diff --git a/quill-sql/src/test/scala/io/getquill/sanity/SimpleMapRunSanityTest.scala b/quill-sql/src/test/scala/io/getquill/sanity/SimpleMapRunSanityTest.scala index a01ef53e8..6a88845e1 100644 --- a/quill-sql/src/test/scala/io/getquill/sanity/SimpleMapRunSanityTest.scala +++ b/quill-sql/src/test/scala/io/getquill/sanity/SimpleMapRunSanityTest.scala @@ -29,7 +29,11 @@ class SimpleMapRunSanityTest extends Spec { q.map(p => p.name) } val quat = quatOf[SanePerson] - qq.ast mustEqual Map(Entity("SanePerson", List(), quat.probit), Ident("p", quat), Property(Ident("p", quat), "name")) + qq.ast mustEqual Map( + Entity("SanePerson", List(), quat.probit), + Ident("p", quat), + Property(Ident("p", quat), "name") + ) val ctx = new MirrorContext(MirrorIdiom, Literal) import ctx._ val output = ctx.run(qq).string diff --git a/quill-sql/src/test/scala/io/getquill/sanity/SimpleMapSanityTest.scala b/quill-sql/src/test/scala/io/getquill/sanity/SimpleMapSanityTest.scala index fb17b5d68..6c7d142aa 100644 --- a/quill-sql/src/test/scala/io/getquill/sanity/SimpleMapSanityTest.scala +++ b/quill-sql/src/test/scala/io/getquill/sanity/SimpleMapSanityTest.scala @@ -24,7 +24,11 @@ class SimpleMapSanityTest extends Spec { q.map(p => p.name) } val quat = quatOf[SanePerson] - qq.ast mustEqual Map(Entity("SanePerson", List(), quat.probit), Ident("p", quat), Property(Ident("p", quat), "name")) + qq.ast mustEqual Map( + Entity("SanePerson", List(), quat.probit), + Ident("p", quat), + Property(Ident("p", quat), "name") + ) } } diff --git a/quill-sql/src/test/scala/io/getquill/sanity/SimpleMapSqlSanityTest.scala b/quill-sql/src/test/scala/io/getquill/sanity/SimpleMapSqlSanityTest.scala index a228ede0a..d1097b762 100644 --- a/quill-sql/src/test/scala/io/getquill/sanity/SimpleMapSqlSanityTest.scala +++ b/quill-sql/src/test/scala/io/getquill/sanity/SimpleMapSqlSanityTest.scala @@ -25,7 +25,11 @@ class SimpleMapSqlSanityTest extends Spec { q.map(p => p.name) } val quat = quatOf[SanePerson] - qq.ast mustEqual Map(Entity("SanePerson", List(), quat.probit), Ident("p", quat), Property(Ident("p", quat), "name")) + qq.ast mustEqual Map( + Entity("SanePerson", List(), quat.probit), + Ident("p", quat), + Property(Ident("p", quat), "name") + ) val ctx = new MirrorContext(MirrorSqlDialect, Literal) import ctx._ val output = ctx.run(qq).string diff --git a/quill-sql/src/test/scala/io/getquill/sanity/SimplePrepareSpec.scala b/quill-sql/src/test/scala/io/getquill/sanity/SimplePrepareSpec.scala index 5a46ceb3e..37f7c2b15 100644 --- a/quill-sql/src/test/scala/io/getquill/sanity/SimplePrepareSpec.scala +++ b/quill-sql/src/test/scala/io/getquill/sanity/SimplePrepareSpec.scala @@ -18,15 +18,15 @@ class SimplePrepareSpec extends Spec { case class Person(name: String, age: Int) "query" in { - inline def q = 
quote { query[Person] } - val result = prepare(q) + inline def q = quote(query[Person]) + val result = prepare(q) result.sql mustEqual "SELECT x.name, x.age FROM Person x" } "batch" in { - val list = List(Person("Joe", 1), Person("Jack", 2)) - inline def q = quote { liftQuery(list).foreach(p => query[Person].insertValue(p)) } - val result = prepare(q) + val list = List(Person("Joe", 1), Person("Jack", 2)) + inline def q = quote(liftQuery(list).foreach(p => query[Person].insertValue(p))) + val result = prepare(q) result.groups.length mustEqual 1 result.groups(0)._1 mustEqual "INSERT INTO Person (name,age) VALUES (?, ?)" result.groups(0)._2.map(_.data) mustEqual List(Seq(("_1", "Joe"), ("_2", 1)), Seq(("_1", "Jack"), ("_2", 2))) diff --git a/quill-zio/src/main/scala/io/getquill/context/qzio/ImplicitSyntax.scala b/quill-zio/src/main/scala/io/getquill/context/qzio/ImplicitSyntax.scala index ad1ef31f8..a6178757c 100644 --- a/quill-zio/src/main/scala/io/getquill/context/qzio/ImplicitSyntax.scala +++ b/quill-zio/src/main/scala/io/getquill/context/qzio/ImplicitSyntax.scala @@ -1,12 +1,12 @@ package io.getquill.context.qzio import zio.stream.ZStream -import zio.{ Tag, IO, ZIO } +import zio.{Tag, IO, ZIO} import zio.ZEnvironment /** - * Use to provide `run(myQuery)` calls with a context implicitly, saving the need to provide things multiple times. - * For example in JDBC: + * Use to provide `run(myQuery)` calls with a context implicitly, saving the need + * to provide things multiple times. For example in JDBC: * {{{ * case class MyQueryService(ds: DataSource with Closeable) { * import Ctx._ @@ -24,8 +24,9 @@ import zio.ZEnvironment * val alexes = Ctx.run(query[Person].filter(p => p.name == "Alex")).onDataSource.provide(Has(ds)) * }}} * - * For other contexts where the environment returned from `run(myQuery)` is just the session itself, - * usage is even simpler. For instance, in quill-zio-cassandra, you only need to specify `implicitly`. + * For other contexts where the environment returned from `run(myQuery)` is just + * the session itself, usage is even simpler. For instance, in + * quill-zio-cassandra, you only need to specify `implicitly`. * * {{{ * case class MyQueryService(cs: CassandraZioSession) { @@ -37,10 +38,13 @@ import zio.ZEnvironment * def alexes = Ctx.run { query[Person].filter(p => p.name == "Alex") }.implicitly * } * }}} - * */ object ImplicitSyntax { - /** A new type that indicates that the value `R` should be made available to the environment implicitly. */ + + /** + * A new type that indicates that the value `R` should be made available to + * the environment implicitly.
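+ * (An illustrative aside, not in the original scaladoc: wrapping a value, e.g. + * `implicit val s = Implicit(session)`, is what enables the `.implicitly` calls + * shown in the examples above; `session` is a hypothetical name.)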
diff --git a/quill-zio/src/main/scala/io/getquill/context/qzio/ZioContext.scala b/quill-zio/src/main/scala/io/getquill/context/qzio/ZioContext.scala
index 8b4626416..7fd5a54da 100644
--- a/quill-zio/src/main/scala/io/getquill/context/qzio/ZioContext.scala
+++ b/quill-zio/src/main/scala/io/getquill/context/qzio/ZioContext.scala
@@ -1,23 +1,31 @@
 package io.getquill.context.qzio
 
 import io.getquill.NamingStrategy
-import io.getquill.context.{ Context, ExecutionInfo, ContextVerbStream }
+import io.getquill.context.{Context, ExecutionInfo, ContextVerbStream}
 import zio.ZIO
 import zio.stream.ZStream
 
-trait ZioContext[+Idiom <: io.getquill.idiom.Idiom, +Naming <: NamingStrategy] extends Context[Idiom, Naming]
-  with ContextVerbStream[Idiom, Naming] {
+trait ZioContext[+Idiom <: io.getquill.idiom.Idiom, +Naming <: NamingStrategy]
+    extends Context[Idiom, Naming]
+    with ContextVerbStream[Idiom, Naming] {
 
   type Error
   type Environment
 
   // It's nice that we don't actually have to import any JDBC libraries to have a Connection type here
-  override type StreamResult[T] = ZStream[Environment, Error, T]
-  override type Result[T] = ZIO[Environment, Error, T]
-  override type RunQueryResult[T] = List[T]
+  override type StreamResult[T]         = ZStream[Environment, Error, T]
+  override type Result[T]               = ZIO[Environment, Error, T]
+  override type RunQueryResult[T]       = List[T]
   override type RunQuerySingleResult[T] = T
 
   // Need explicit return-type annotations due to scala/bug#8356. Otherwise the macro system will not understand Result[Long]=Task[Long] etc...
-  def executeQuery[T](sql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)(info: ExecutionInfo, dc: Runner): ZIO[Environment, Error, List[T]]
-  def executeQuerySingle[T](sql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)(info: ExecutionInfo, dc: Runner): ZIO[Environment, Error, T]
+  def executeQuery[T](sql: String, prepare: Prepare = identityPrepare, extractor: Extractor[T] = identityExtractor)(
+    info: ExecutionInfo,
+    dc: Runner
+  ): ZIO[Environment, Error, List[T]]
+  def executeQuerySingle[T](
+    sql: String,
+    prepare: Prepare = identityPrepare,
+    extractor: Extractor[T] = identityExtractor
+  )(info: ExecutionInfo, dc: Runner): ZIO[Environment, Error, T]
 }
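
The scala/bug#8356 comment in this hunk deserves a gloss: once a concrete context pins down `Error` and `Environment`, `Result[T]` is definitionally equal to a plain ZIO type, but macro-generated call sites only see that equality when signatures spell it out. A rough illustration of the type-level collapse, with hypothetical names that are not from this patch:

import java.sql.SQLException
import javax.sql.DataSource
import zio.ZIO

// With Error and Environment fixed as below, Result[Long] and
// ZIO[DataSource, SQLException, Long] are the same type; the explicit
// annotations in ZioContext make that equality visible to macro code.
trait JdbcLikeTypes {
  type Error       = SQLException
  type Environment = DataSource
  type Result[T]   = ZIO[Environment, Error, T]
}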
diff --git a/quill-zio/src/main/scala/io/getquill/context/qzio/ZioTranslateContext.scala b/quill-zio/src/main/scala/io/getquill/context/qzio/ZioTranslateContext.scala
index 65a44157e..8593432db 100644
--- a/quill-zio/src/main/scala/io/getquill/context/qzio/ZioTranslateContext.scala
+++ b/quill-zio/src/main/scala/io/getquill/context/qzio/ZioTranslateContext.scala
@@ -1,20 +1,20 @@
 package io.getquill.context.qzio
 
 import io.getquill.NamingStrategy
-import io.getquill.context.{ Context, ContextEffect }
+import io.getquill.context.{Context, ContextEffect}
 import io.getquill.idiom.Idiom
 import zio.ZIO
 import io.getquill.context.ContextTranslateMacro
 
 trait ZioTranslateContext[+Dialect <: io.getquill.idiom.Idiom, +Naming <: NamingStrategy]
-  extends Context[Dialect, Naming]
-  with ContextTranslateMacro[Dialect, Naming] {
+    extends Context[Dialect, Naming]
+    with ContextTranslateMacro[Dialect, Naming] {
 
   type Error
   type Environment
 
   override type TranslateResult[T] = ZIO[Environment, Error, T]
-  override def wrap[T](t: => T): TranslateResult[T] = ZIO.environment[Environment].as(t)
+  override def wrap[T](t: => T): TranslateResult[T]                                  = ZIO.environment[Environment].as(t)
   override def push[A, B](result: TranslateResult[A])(f: A => B): TranslateResult[B] = result.map(f)
-  override def seq[A](list: List[TranslateResult[A]]): TranslateResult[List[A]] = ZIO.collectAll(list)
+  override def seq[A](list: List[TranslateResult[A]]): TranslateResult[List[A]]      = ZIO.collectAll(list)
 }
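
The `wrap`/`push`/`seq` overrides above are the ZIO spellings of "lift a value", "map", and "sequence" that the translation machinery composes; with them in place, `translate` yields the rendered query inside the context's effect type. A sketch, reusing the illustrative `Ctx` and `Person` from the note above (the precise environment and error types depend on the concrete context):

// Renders the SQL without executing it,
// e.g. as a ZIO[DataSource, SQLException, String].
val renderedSql = Ctx.translate(query[Person].filter(p => p.name == "Alex"))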