Hi!
我使用的是 Flink 1.13.1 版本，执行如下 Hive SQL： CREATE CATALOG tempo_df_hive_default_catalog
WITH( 'type' = 'hive', 'default-database' = 'default' ); USE CATALOG tempo_df_hive_default_catalog; CREATE TABLE IF NOT EXISTS default
.tempo_blackhole_table
( f0 INT ); use cosldatacenter; INSERT INTO dw_riginfoparam
( large_equip_id
, equip_code
, equip_name
, enqueue_date
, shi_total_len
, shi_type_width
, shi_type_depth
, moonpool
) SELECT mle.large_equip_id
, mle.equip_code
, mle.equip_name
, mle.enqueue_date
, mle.shi_total_len
, mle.shi_type_width
, mle.shi_type_depth
, CASE WHEN mipd.param_cn
= '月池尺寸' THEN mipv.param_value
END AS Moonpool
from ods_emp_maindata_iadc_paramvalue
mipv INNER JOIN ods_emp_maindata_iadc_paramdef
mipd ON mipv.param_id
= mipd.param_id
inner JOIN ods_emp_md_large_equip
mle ON mipv.SUBJECT_ID
= mle.LARGE_EQUIP_ID
; INSERT INTO default
.tempo_blackhole_table
SELECT 1 错误: Caused by: org.apache.hadoop.hive.ql.parse.SemanticException: Line 2:195 Invalid table alias or column reference 'u': (possible column names are: mipv.paramvalue_id, mipv.platform_id, mipv.equipment_id, mipv.param_id, mipv.param_value, mipv.remark, mipv.create_time, mipv.creator, mipv.update_time, mipv.update_person, mipv.record_flag, mipv.subject_id, mipv.output_unit, mipv.show_seq, mipd.param_id, mipd.iadc_id, mipd.param_code, mipd.param_en, mipd.param_cn, mipd.output_standard, mipd.output_unit, mipd.param_type, mipd.param_value, mipd.remark, mipd.create_time, mipd.creator, mipd.update_time, mipd.update_person, mipd.record_flag, mle.large_equip_id, mle.equip_name, mle.equip_type, mle.equip_function, mle.equip_board, mle.ship_yard, mle.manufacturer_date, mle.enqueue_date, mle.dockrepair_date, mle.scrap_date, mle.enqueue_mode, mle.work_for_org, mle.work_in_org, mle.old_age, mle.create_time, mle.creator, mle.update_time, mle.update_person, mle.record_flag, mle.data_timestamp, mle.work_unit_id, mle.work_status, mle.work_location, mle.work_area, mle.equip_code, mle.shi_main_power, mle.shi_total_len, mle.shi_type_width, mle.shi_type_depth, mle.shi_design_draft, mle.shi_total_tonnage, mle.shi_load_tonnage, mle.remark, mle.unit_classification1, mle.unit_classification2) at org.apache.flink.table.planner.delegation.hive.copy.HiveParserSemanticAnalyzer.genAllExprNodeDesc(HiveParserSemanticAnalyzer.java:2467) ~[flink-sql-connector-hive-1.2.2_2.11-1.13.1.jar:1.13.1] at org.apache.flink.table.planner.delegation.hive.copy.HiveParserSemanticAnalyzer.genExprNodeDesc(HiveParserSemanticAnalyzer.java:2421) ~[flink-sql-connector-hive-1.2.2_2.11-1.13.1.jar:1.13.1] at org.apache.flink.table.planner.delegation.hive.HiveParserCalcitePlanner.genSelectLogicalPlan(HiveParserCalcitePlanner.java:2314) ~[flink-sql-connector-hive-1.2.2_2.11-1.13.1.jar:1.13.1] at 
org.apache.flink.table.planner.delegation.hive.HiveParserCalcitePlanner.genLogicalPlan(HiveParserCalcitePlanner.java:2772) ~[flink-sql-connector-hive-1.2.2_2.11-1.13.1.jar:1.13.1] at org.apache.flink.table.planner.delegation.hive.HiveParserCalcitePlanner.logicalPlan(HiveParserCalcitePlanner.java:285) ~[flink-sql-connector-hive-1.2.2_2.11-1.13.1.jar:1.13.1] at org.apache.flink.table.planner.delegation.hive.HiveParserCalcitePlanner.genLogicalPlan(HiveParserCalcitePlanner.java:273) ~[flink-sql-connector-hive-1.2.2_2.11-1.13.1.jar:1.13.1] at org.apache.flink.table.planner.delegation.hive.HiveParser.analyzeSql(HiveParser.java:326) ~[flink-sql-connector-hive-1.2.2_2.11-1.13.1.jar:1.13.1] at org.apache.flink.table.planner.delegation.hive.HiveParser.processCmd(HiveParser.java:274) ~[flink-sql-connector-hive-1.2.2_2.11-1.13.1.jar:1.13.1] at org.apache.flink.table.planner.delegation.hive.HiveParser.parse(HiveParser.java:217) ~[flink-sql-connector-hive-1.2.2_2.11-1.13.1.jar:1.13.1] at org.apache.flink.table.api.internal.StatementSetImpl.addInsertSql(StatementSetImpl.java:51) ~[flink-table-blink_2.11-1.13.1.jar:1.13.1]*来自志愿者整理的flink邮件归档
版权声明:本文内容由阿里云实名注册用户自发贡献,版权归原作者所有,阿里云开发者社区不拥有其著作权,亦不承担相应法律责任。具体规则请查看《阿里云开发者社区用户服务协议》和《阿里云开发者社区知识产权保护指引》。如果您发现本社区中有涉嫌抄袭的内容,填写侵权投诉表单进行举报,一经查实,本社区将立刻删除涉嫌侵权内容。