Hive / HIVE-25717

INSERT INTO on external MariaDB/MySQL table fails silently

    Description

      MariaDB/MySQL

      CREATE TABLE country (id   int, name varchar(20));
      
      insert into country values (1, 'India');
      insert into country values (2, 'Russia');
      insert into country values (3, 'USA');
      

      Hive

      CREATE EXTERNAL TABLE country (id int, name varchar(20))
      STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'
      TBLPROPERTIES (
          "hive.sql.database.type" = "MYSQL",
          "hive.sql.jdbc.driver" = "com.mysql.jdbc.Driver",
          "hive.sql.jdbc.url" = "jdbc:mysql://localhost:3306/qtestDB",
          "hive.sql.dbcp.username" = "root",
          "hive.sql.dbcp.password" = "qtestpassword",
          "hive.sql.table" = "country"
      );
      
      INSERT INTO country VALUES (8, 'Hungary');
      SELECT * FROM country;
      

      Expected results

      ID  NAME
      1   India
      2   Russia
      3   USA
      8   Hungary

      Actual results

      ID  NAME
      1   India
      2   Russia
      3   USA

      The INSERT INTO statement finishes without reporting any problem in the logs, but the row is not inserted into the table.
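
      Querying the backing MySQL table directly confirms that the row never reaches the database, i.e. the failure happens on Hive's write path and not on the subsequent read. A minimal verification sketch (a hypothetical helper, not part of the attached test), assuming MySQL Connector/J on the classpath and reusing the connection details from the TBLPROPERTIES above:

      import java.sql.Connection;
      import java.sql.DriverManager;
      import java.sql.PreparedStatement;
      import java.sql.ResultSet;

      public class VerifyRowCount {
          public static void main(String[] args) throws Exception {
              // Connection details taken from the table properties above.
              try (Connection conn = DriverManager.getConnection(
                       "jdbc:mysql://localhost:3306/qtestDB", "root", "qtestpassword");
                   PreparedStatement ps = conn.prepareStatement(
                       "SELECT COUNT(*) FROM country WHERE id = ?")) {
                  ps.setInt(1, 8);
                  try (ResultSet rs = ps.executeQuery()) {
                      rs.next();
                      // Prints 0 after the failing Hive INSERT: the row was never written.
                      System.out.println("rows with id=8: " + rs.getInt(1));
                  }
              }
          }
      }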

      When the attached test is run it comes back green, even though the following exception is printed to System.err (it does not appear in the logs). A standalone JDBC reproduction of the same exception is sketched after the stack trace.

      java.sql.SQLException: Parameter metadata not available for the given statement
              at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:129)
              at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:97)
              at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:89)
              at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:63)
              at com.mysql.cj.jdbc.MysqlParameterMetadata.checkAvailable(MysqlParameterMetadata.java:86)
              at com.mysql.cj.jdbc.MysqlParameterMetadata.getParameterType(MysqlParameterMetadata.java:138)
              at org.apache.hive.storage.jdbc.DBRecordWritable.write(DBRecordWritable.java:67)
              at org.apache.hadoop.mapreduce.lib.db.DBOutputFormat$DBRecordWriter.write(DBOutputFormat.java:122)
              at org.apache.hive.storage.jdbc.JdbcRecordWriter.write(JdbcRecordWriter.java:47)
              at org.apache.hadoop.hive.ql.exec.FileSinkOperator.process(FileSinkOperator.java:1160)
              at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:888)
              at org.apache.hadoop.hive.ql.exec.SelectOperator.process(SelectOperator.java:94)
              at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:888)
              at org.apache.hadoop.hive.ql.exec.UDTFOperator.forwardUDTFOutput(UDTFOperator.java:133)
              at org.apache.hadoop.hive.ql.udf.generic.UDTFCollector.collect(UDTFCollector.java:45)
              at org.apache.hadoop.hive.ql.udf.generic.GenericUDTF.forward(GenericUDTF.java:110)
              at org.apache.hadoop.hive.ql.udf.generic.GenericUDTFInline.process(GenericUDTFInline.java:64)
              at org.apache.hadoop.hive.ql.exec.UDTFOperator.process(UDTFOperator.java:116)
              at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:888)
              at org.apache.hadoop.hive.ql.exec.SelectOperator.process(SelectOperator.java:94)
              at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:888)
              at org.apache.hadoop.hive.ql.exec.TableScanOperator.process(TableScanOperator.java:173)
              at org.apache.hadoop.hive.ql.exec.MapOperator$MapOpCtx.forward(MapOperator.java:154)
              at org.apache.hadoop.hive.ql.exec.MapOperator.process(MapOperator.java:552)
              at org.apache.hadoop.hive.ql.exec.tez.MapRecordSource.processRow(MapRecordSource.java:101)
              at org.apache.hadoop.hive.ql.exec.tez.MapRecordSource.pushRecord(MapRecordSource.java:83)
              at org.apache.hadoop.hive.ql.exec.tez.MapRecordProcessor.run(MapRecordProcessor.java:414)
              at org.apache.hadoop.hive.ql.exec.tez.TezProcessor.initializeAndRunProcessor(TezProcessor.java:311)
              at org.apache.hadoop.hive.ql.exec.tez.TezProcessor.run(TezProcessor.java:277)
              at org.apache.tez.runtime.LogicalIOProcessorRuntimeTask.run(LogicalIOProcessorRuntimeTask.java:381)
              at org.apache.tez.runtime.task.TaskRunner2Callable$1.run(TaskRunner2Callable.java:82)
              at org.apache.tez.runtime.task.TaskRunner2Callable$1.run(TaskRunner2Callable.java:69)
              at java.security.AccessController.doPrivileged(Native Method)
              at javax.security.auth.Subject.doAs(Subject.java:422)
              at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1682)
              at org.apache.tez.runtime.task.TaskRunner2Callable.callInternal(TaskRunner2Callable.java:69)
              at org.apache.tez.runtime.task.TaskRunner2Callable.callInternal(TaskRunner2Callable.java:39)
              at org.apache.tez.common.CallableWithNdc.call(CallableWithNdc.java:36)
              at org.apache.hadoop.hive.llap.daemon.impl.StatsRecordingThreadPool$WrappedCallable.call(StatsRecordingThreadPool.java:118)
              at java.util.concurrent.FutureTask.run(FutureTask.java:266)
              at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
              at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
              at java.lang.Thread.run(Thread.java:748)
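
      The exception can be reproduced outside of Hive with plain JDBC: by default MySQL Connector/J does not provide parameter metadata for client-side prepared statements, so getParameterType fails in the same way as the DBRecordWritable.write call above. The following standalone sketch (not code from the Hive code base) assumes Connector/J 8.x on the classpath and the qtestDB setup from the description:

      import java.sql.Connection;
      import java.sql.DriverManager;
      import java.sql.ParameterMetaData;
      import java.sql.PreparedStatement;

      public class ParameterMetadataRepro {
          public static void main(String[] args) throws Exception {
              try (Connection conn = DriverManager.getConnection(
                       "jdbc:mysql://localhost:3306/qtestDB", "root", "qtestpassword");
                   PreparedStatement ps = conn.prepareStatement(
                       "INSERT INTO country VALUES (?, ?)")) {
                  ParameterMetaData meta = ps.getParameterMetaData();
                  // Expected to throw java.sql.SQLException: "Parameter metadata not
                  // available for the given statement" -- the same call that fails in
                  // DBRecordWritable.write according to the stack trace above.
                  System.out.println(meta.getParameterType(1));
              }
          }
      }

      Adding generateSimpleParameterMetadata=true to the JDBC URL makes Connector/J return a synthetic type for every parameter instead of throwing, which can be used to check whether the write path proceeds past this call; either way, the underlying issue is that the SQLException only ends up on System.err while the INSERT still reports success.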
      

      Attachments

        1. jdbc_table_dml_mysql.q (0.5 kB, Stamatis Zampetakis)

            People

              Assignee: Stamatis Zampetakis (zabetak)
              Reporter: Stamatis Zampetakis (zabetak)
              Votes: 0
              Watchers: 2

                Time Tracking

                  Estimated: Not Specified
                  Remaining: 0h
                  Logged: 2h 50m