content.json
1 lines (1 loc) · 58.2 KB
[{"title":"Spark RPC 学习总结","date":"2025-01-12T16:00:00.000Z","path":"2025/01/13/spark/sparkRpcLearning/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"}]},{"title":"Netty 入门学习","date":"2025-01-08T16:00:00.000Z","path":"2025/01/09/netty/netty-learning/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Netty","slug":"Netty","permalink":"https://dongkelun.com/tags/Netty/"}]},{"title":"log4j 单独设置某个类或者某个包的级别","date":"2024-12-26T16:00:00.000Z","path":"2024/12/27/log/log-separate-class-conf/","categories":[],"tags":[{"name":"Flink","slug":"Flink","permalink":"https://dongkelun.com/tags/Flink/"}]},{"title":"Hudi 源码 | 索引总结 - tag/tagLocation","date":"2024-07-14T16:00:00.000Z","path":"2024/07/15/hudi/hudiIndex-tag/","categories":[],"tags":[{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Hudi 索引总结 - Parquet布隆过滤器写入过程","date":"2024-07-09T16:00:00.000Z","path":"2024/07/10/hudi/hudiIndex-wirte/","categories":[],"tags":[{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Hudi 写入流程(图)","date":"2024-07-03T16:00:00.000Z","path":"2024/07/04/hudi/hudiWriteFlow/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Flink","slug":"Flink","permalink":"https://dongkelun.com/tags/Flink/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Flink 重启策略和故障恢复策略","date":"2024-06-23T16:00:00.000Z","path":"2024/06/24/flink/flink-task-failure-recovery/","categories":[],"tags":[{"name":"Flink","slug":"Flink","permalink":"https://dongkelun.com/tags/Flink/"},{"name":"HBase","slug":"HBase","permalink":"https://dongkelun.com/tags/HBase/"}]},{"title":"Hudi extraMetadata 研究总结","date":"2024-06-12T16:00:00.000Z","path":"2024/06/13/hudi/hudiExtraMetadata/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"},{"name":"extraMetadata","slug":"extraMetadata","permalink":"https://dongkelun.com/tags/extraMetadata/"}]},{"title":"Hudi CLI 安装配置总结","date":"2024-06-06T16:00:00.000Z","path":"2024/06/07/hudi/HudiCliConf/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"},{"name":"Hudi CLI","slug":"Hudi-CLI","permalink":"https://dongkelun.com/tags/Hudi-CLI/"}]},{"title":"Hudi Spark Sql Procedures 回滚 Hudi 表数据","date":"2024-06-03T16:00:00.000Z","path":"2024/06/04/hudi/hudiSparkSQLRollback/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"记录一个 Hudi HBase 依赖冲突问题及解决方案","date":"2024-05-31T16:00:00.000Z","path":"2024/06/01/hudi/hudi-hbase-dependency-solution/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Hudi Flink MOR 学习总结","date":"2024-05-23T16:00:00.000Z","path":"2024/05/24/hudi/hudiFlinkMor/","categories":[],"tags":[{"name":"Flink","slug":"Flink","permalink":"https://dongkelun.com/tags/Flink/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Spark Client 
配置","date":"2024-05-17T16:00:00.000Z","path":"2024/05/18/spark/sparkClientConf/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"}]},{"title":"Hive 表添加列(新增字段)","date":"2024-04-21T16:00:00.000Z","path":"2024/04/22/hive/hiveAddColumns/","categories":[],"tags":[{"name":"Hive","slug":"Hive","permalink":"https://dongkelun.com/tags/Hive/"},{"name":"Partition","slug":"Partition","permalink":"https://dongkelun.com/tags/Partition/"}]},{"title":"集群管理命令总结","date":"2024-04-10T16:00:00.000Z","path":"2024/04/11/linux/clushConf/","categories":[],"tags":[{"name":"Linux","slug":"Linux","permalink":"https://dongkelun.com/tags/Linux/"}]},{"title":"QQ 截图工具独立版安装使用","date":"2024-03-04T16:00:00.000Z","path":"2024/03/05/tool/QQScreenShot/","categories":[],"tags":[{"name":"tool","slug":"tool","permalink":"https://dongkelun.com/tags/tool/"}]},{"title":"Spark Standalone 集群配置","date":"2024-02-06T16:00:00.000Z","path":"2024/02/07/spark/standaloneConf/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"}]},{"title":"Linux 批量添加 known_hosts","date":"2024-01-04T16:00:00.000Z","path":"2024/01/05/linux/add-known-hosts/","categories":[],"tags":[{"name":"Linux","slug":"Linux","permalink":"https://dongkelun.com/tags/Linux/"}]},{"title":"Flink 日志总结","date":"2023-12-20T16:00:00.000Z","path":"2023/12/21/flink/flinkLog/","categories":[],"tags":[{"name":"Flink","slug":"Flink","permalink":"https://dongkelun.com/tags/Flink/"}]},{"title":"Flink源码分析 | 读取HBase配置","date":"2023-12-11T16:00:00.000Z","path":"2023/12/12/flink/flink-source-code-hbaseConf/","categories":[],"tags":[{"name":"Flink","slug":"Flink","permalink":"https://dongkelun.com/tags/Flink/"},{"name":"HBase","slug":"HBase","permalink":"https://dongkelun.com/tags/HBase/"}]},{"title":"Flink 读写 HBase 总结","date":"2023-12-08T16:00:00.000Z","path":"2023/12/09/flink/flinkHbase/","categories":[],"tags":[{"name":"Flink","slug":"Flink","permalink":"https://dongkelun.com/tags/Flink/"},{"name":"HBase","slug":"HBase","permalink":"https://dongkelun.com/tags/HBase/"}]},{"title":"Flink 源码阅读笔记(3)- Flink 底层RPC框架分析","date":"2023-12-07T16:00:00.000Z","path":"2023/12/08/flink/flink-source-code--rpc/","categories":[],"tags":[{"name":"源码","slug":"源码","permalink":"https://dongkelun.com/tags/源码/"},{"name":"Flink","slug":"Flink","permalink":"https://dongkelun.com/tags/Flink/"},{"name":"转载","slug":"转载","permalink":"https://dongkelun.com/tags/转载/"},{"name":"RPC","slug":"RPC","permalink":"https://dongkelun.com/tags/RPC/"}]},{"title":"谁说 AI 编程工具缺乏记忆和联想能力?简单琐碎的需求完全可以交给它","date":"2023-11-05T16:00:00.000Z","path":"2023/11/06/amazon/ai-programming-tools/","categories":[],"tags":[{"name":"ai","slug":"ai","permalink":"https://dongkelun.com/tags/ai/"},{"name":"amazon","slug":"amazon","permalink":"https://dongkelun.com/tags/amazon/"}]},{"title":"Flink 源码阅读笔记(2)- JobGraph 的生成","date":"2023-08-31T16:00:00.000Z","path":"2023/09/01/flink/flink-source-code-jobgraph/","categories":[],"tags":[{"name":"源码","slug":"源码","permalink":"https://dongkelun.com/tags/源码/"},{"name":"Flink","slug":"Flink","permalink":"https://dongkelun.com/tags/Flink/"},{"name":"转载","slug":"转载","permalink":"https://dongkelun.com/tags/转载/"}]},{"title":"Hudi Flink SQL源码调试学习(二)- Transformation/StreamOperator总结 - 
StreamGraph和JobGraph的生成过程","date":"2023-08-28T16:00:00.000Z","path":"2023/08/29/hudi/hudiFlinkSQLSourceCodeDebug2/","categories":[],"tags":[{"name":"源码","slug":"源码","permalink":"https://dongkelun.com/tags/源码/"},{"name":"Flink","slug":"Flink","permalink":"https://dongkelun.com/tags/Flink/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Flink 源码阅读笔记(1)- StreamGraph 的生成","date":"2023-08-24T16:00:00.000Z","path":"2023/08/25/flink/flink-source-code-streamgraph/","categories":[],"tags":[{"name":"源码","slug":"源码","permalink":"https://dongkelun.com/tags/源码/"},{"name":"Flink","slug":"Flink","permalink":"https://dongkelun.com/tags/Flink/"},{"name":"转载","slug":"转载","permalink":"https://dongkelun.com/tags/转载/"}]},{"title":"记录几个Hudi Flink使用问题及解决方法","date":"2023-08-19T16:00:00.000Z","path":"2023/08/20/hudi/hudiFlinkProblemSolvingMethod/","categories":[],"tags":[{"name":"Flink","slug":"Flink","permalink":"https://dongkelun.com/tags/Flink/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Hudi Flink SQL源码调试学习(一)","date":"2023-07-30T16:00:00.000Z","path":"2023/07/31/hudi/hudiFlinkSQLSourceCodeDebug1/","categories":[],"tags":[{"name":"Flink","slug":"Flink","permalink":"https://dongkelun.com/tags/Flink/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Flink 读写Kafka总结","date":"2023-07-04T16:00:00.000Z","path":"2023/07/05/flink/flinkKafka/","categories":[],"tags":[{"name":"Kafka","slug":"Kafka","permalink":"https://dongkelun.com/tags/Kafka/"},{"name":"Flink","slug":"Flink","permalink":"https://dongkelun.com/tags/Flink/"}]},{"title":"Hudi Flink SQL代码示例及本地调试","date":"2023-05-28T16:00:00.000Z","path":"2023/05/29/hudiFlinkSQLCodeDebug/","categories":[],"tags":[{"name":"Flink","slug":"Flink","permalink":"https://dongkelun.com/tags/Flink/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Flink用户自定义连接器(Table API Connectors)学习总结","date":"2023-05-23T16:00:00.000Z","path":"2023/05/24/flinkUserDefinedConnector/","categories":[],"tags":[{"name":"Flink","slug":"Flink","permalink":"https://dongkelun.com/tags/Flink/"}]},{"title":"Flink Hudi DataStream API代码示例","date":"2023-05-21T16:00:00.000Z","path":"2023/05/22/flinkHudiCode/","categories":[],"tags":[{"name":"Flink","slug":"Flink","permalink":"https://dongkelun.com/tags/Flink/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Flink MySQL CDC 使用总结","date":"2023-04-02T16:00:00.000Z","path":"2023/04/03/flinkMySQLCDC/","categories":[],"tags":[{"name":"Flink","slug":"Flink","permalink":"https://dongkelun.com/tags/Flink/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Flink SQL Checkpoint 学习总结","date":"2023-02-28T16:00:00.000Z","path":"2023/03/01/flinkSqlCheckpoint/","categories":[],"tags":[{"name":"Flink","slug":"Flink","permalink":"https://dongkelun.com/tags/Flink/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Flink SQL增量查询Hudi表","date":"2022-12-04T16:00:00.000Z","path":"2022/12/05/hudiFlinkSqlIncrementalQuery/","categories":[],"tags":[{"name":"Flink","slug":"Flink","permalink":"https://dongkelun.com/tags/Flink/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Spark 
SQL增量查询Hudi表","date":"2022-11-29T16:00:00.000Z","path":"2022/11/30/hudiSparkSQLIncrementalQuery/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Hudi Spark SQL Call Procedures学习总结(一)(查询统计表文件信息)","date":"2022-11-23T16:00:00.000Z","path":"2022/11/24/hudiSparkSQLCallProcedures/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Hudi源码 | Insert源码分析总结(二)(WorkloadProfile)","date":"2022-11-15T16:00:00.000Z","path":"2022/11/16/hudiSourceCode-javaInsert2/","categories":[],"tags":[{"name":"源码","slug":"源码","permalink":"https://dongkelun.com/tags/源码/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Hudi源码 | Insert源码分析总结(一)(整体流程)","date":"2022-11-09T16:00:00.000Z","path":"2022/11/10/hudiSourceCode-javaInsert/","categories":[],"tags":[{"name":"源码","slug":"源码","permalink":"https://dongkelun.com/tags/源码/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Flink SQL通过Hudi HMS Catalog读写Hudi并同步Hive表(强烈推荐这种方式)","date":"2022-11-01T16:00:00.000Z","path":"2022/11/02/flinkHudiHmsCatalog/","categories":[],"tags":[{"name":"Flink","slug":"Flink","permalink":"https://dongkelun.com/tags/Flink/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Hudi master 0.13.0-SNAPSHOT Win10 打包异常解决","date":"2022-10-31T16:00:00.000Z","path":"2022/11/01/hudiPackageError/","categories":[],"tags":[{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Flink SQL操作Hudi并同步Hive使用总结","date":"2022-10-30T16:00:00.000Z","path":"2022/10/31/hudiFlinkSql/","categories":[],"tags":[{"name":"Flink","slug":"Flink","permalink":"https://dongkelun.com/tags/Flink/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Hudi Java Client总结|读取Hive写Hudi代码示例","date":"2022-10-25T16:00:00.000Z","path":"2022/10/26/hudiJavaClient/","categories":[],"tags":[{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"},{"name":"Java","slug":"Java","permalink":"https://dongkelun.com/tags/Java/"}]},{"title":"开源经验分享 | 如何从一名小白成为Apache Hudi Contributor","date":"2022-10-21T16:00:00.000Z","path":"2022/10/22/hudiContributor/","categories":[],"tags":[{"name":"源码","slug":"源码","permalink":"https://dongkelun.com/tags/源码/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"},{"name":"开源","slug":"开源","permalink":"https://dongkelun.com/tags/开源/"}]},{"title":"Hudi源码|bootstrap源码分析总结(写Hudi)","date":"2022-10-17T16:00:00.000Z","path":"2022/10/18/hudiSourceCode-bootstrap/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"源码","slug":"源码","permalink":"https://dongkelun.com/tags/源码/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"写Hudi异常:'Not an Avro data file' 解决方法","date":"2022-10-13T16:00:00.000Z","path":"2022/10/14/hudiNotAvro/","categories":[],"tags":[{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"},{"name":"异常","slug":"异常","permalink":"https://dongkelun.com/tags/异常/"}]},{"title":"利用Hudi 
Bootstrap转化现有Hive表的parquet/orc文件为Hudi表","date":"2022-10-11T16:00:00.000Z","path":"2022/10/12/hudiBootstrap/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Flink 读写 Ceph S3入门学习总结","date":"2022-09-30T16:00:00.000Z","path":"2022/10/01/flinkS3/","categories":[],"tags":[{"name":"Ceph","slug":"Ceph","permalink":"https://dongkelun.com/tags/Ceph/"},{"name":"S3","slug":"S3","permalink":"https://dongkelun.com/tags/S3/"},{"name":"Flink","slug":"Flink","permalink":"https://dongkelun.com/tags/Flink/"}]},{"title":"Spark 读写 Ceph S3入门学习总结","date":"2022-09-29T16:00:00.000Z","path":"2022/09/30/saprkS3/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Ceph","slug":"Ceph","permalink":"https://dongkelun.com/tags/Ceph/"},{"name":"S3","slug":"S3","permalink":"https://dongkelun.com/tags/S3/"}]},{"title":"Ceph分布式集群安装配置","date":"2022-09-28T16:00:00.000Z","path":"2022/09/29/cephInstallConf/","categories":[],"tags":[{"name":"Ceph","slug":"Ceph","permalink":"https://dongkelun.com/tags/Ceph/"},{"name":"S3","slug":"S3","permalink":"https://dongkelun.com/tags/S3/"}]},{"title":"Flink SQL 客户端查询Hive配置及问题解决","date":"2022-08-25T16:00:00.000Z","path":"2022/08/26/flinkSqlClientQueryHive/","categories":[],"tags":[{"name":"Hive","slug":"Hive","permalink":"https://dongkelun.com/tags/Hive/"},{"name":"Flink","slug":"Flink","permalink":"https://dongkelun.com/tags/Flink/"}]},{"title":"Hudi Spark SQL源码学习总结-select(查询)","date":"2022-08-14T16:00:00.000Z","path":"2022/08/15/hudiSparkSqlSourceCodeLearning-select/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"源码","slug":"源码","permalink":"https://dongkelun.com/tags/源码/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Hudi Spark源码学习总结-spark.read.format(\"hudi\").load(2)","date":"2022-08-11T16:00:00.000Z","path":"2022/08/12/hudiSparkSourceCodeLearning-dfLoad2/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"源码","slug":"源码","permalink":"https://dongkelun.com/tags/源码/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Hudi Spark源码学习总结-spark.read.format(\"hudi\").load","date":"2022-08-10T16:00:00.000Z","path":"2022/08/11/hudiSparkSourceCodeLearning-dfLoad/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"源码","slug":"源码","permalink":"https://dongkelun.com/tags/源码/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Hudi Spark源码学习总结-df.write.format(\"hudi\").save","date":"2022-08-02T16:00:00.000Z","path":"2022/08/03/hudiSparkSourceCodeLearning-dfSave/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"源码","slug":"源码","permalink":"https://dongkelun.com/tags/源码/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Hudi Spark 
SQL源码学习总结-CTAS","date":"2022-07-29T16:00:00.000Z","path":"2022/07/30/hudiSparkSqlSourceCodeLearning-ctas/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"源码","slug":"源码","permalink":"https://dongkelun.com/tags/源码/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Hudi Spark SQL源码学习总结-Create Table","date":"2022-07-19T16:00:00.000Z","path":"2022/07/20/hudiSparkSqlSourceCodeLearning/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"源码","slug":"源码","permalink":"https://dongkelun.com/tags/源码/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Spark3.12+Kyuubi1.5.2+kyuubi-spark-authz源码编译打包+部署配置HA","date":"2022-07-05T16:00:00.000Z","path":"2022/07/06/sparkKyuubiPackConf/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Kyuubi","slug":"Kyuubi","permalink":"https://dongkelun.com/tags/Kyuubi/"},{"name":"Ranger","slug":"Ranger","permalink":"https://dongkelun.com/tags/Ranger/"}]},{"title":"Hudi查询类型/视图总结","date":"2022-06-28T16:00:00.000Z","path":"2022/06/29/hudiQueryTypes/","categories":[],"tags":[{"name":"Hive","slug":"Hive","permalink":"https://dongkelun.com/tags/Hive/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Hive增量查询Hudi表","date":"2022-06-26T16:00:00.000Z","path":"2022/06/27/hiveIncrQueryHudi/","categories":[],"tags":[{"name":"Hive","slug":"Hive","permalink":"https://dongkelun.com/tags/Hive/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Hadoop源码编译打包","date":"2022-06-21T16:00:00.000Z","path":"2022/06/22/hadoopSourceCodeCompilation/","categories":[],"tags":[{"name":"Hadoop","slug":"Hadoop","permalink":"https://dongkelun.com/tags/Hadoop/"},{"name":"源码","slug":"源码","permalink":"https://dongkelun.com/tags/源码/"}]},{"title":"Hudi DeltaStreamer使用总结","date":"2022-06-11T16:00:00.000Z","path":"2022/06/12/hudiDeltaStreamer/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Hudi Spark SQL总结","date":"2022-05-12T16:00:00.000Z","path":"2022/05/13/hudiSparkSQL/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Apache Hudi 入门学习总结","date":"2022-05-11T16:00:00.000Z","path":"2022/05/12/hudiIntroduction/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Hive","slug":"Hive","permalink":"https://dongkelun.com/tags/Hive/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Hudi Clean Policy 清理策略实现分析","date":"2022-04-18T16:00:00.000Z","path":"2022/04/19/hudiCleanPolicy/","categories":[],"tags":[{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Hudi Clean 清理文件实现分析","date":"2022-04-17T16:00:00.000Z","path":"2022/04/18/hudiClean/","categories":[],"tags":[{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Kyuubi 
安装配置总结","date":"2022-03-24T16:00:00.000Z","path":"2022/03/25/kyuubiConf/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"},{"name":"Kyuubi","slug":"Kyuubi","permalink":"https://dongkelun.com/tags/Kyuubi/"}]},{"title":"SparkSQL JDBC 查询Oracle、MySQL 转化为Hudi表","date":"2022-02-14T16:00:00.000Z","path":"2022/02/15/sparkSqlJdbc2Hudi/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"},{"name":"SparkSQL","slug":"SparkSQL","permalink":"https://dongkelun.com/tags/SparkSQL/"}]},{"title":"Spark Thrift Server HA 解决方案","date":"2022-02-10T16:00:00.000Z","path":"2022/02/11/sparkThriftServerHA/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"}]},{"title":"Presto查询Hudi异常解决","date":"2021-12-22T16:00:00.000Z","path":"2021/12/23/prestoHudiException/","categories":[],"tags":[{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"},{"name":"异常","slug":"异常","permalink":"https://dongkelun.com/tags/异常/"},{"name":"Presto","slug":"Presto","permalink":"https://dongkelun.com/tags/Presto/"}]},{"title":"利用Submarin集成Spark-Ranger","date":"2021-12-01T16:00:00.000Z","path":"2021/12/02/submarinSparkRanger/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Ranger","slug":"Ranger","permalink":"https://dongkelun.com/tags/Ranger/"},{"name":"Submarin","slug":"Submarin","permalink":"https://dongkelun.com/tags/Submarin/"}]},{"title":"Hudi preCombinedField 总结(二)-源码分析","date":"2021-11-29T16:00:00.000Z","path":"2021/11/30/hudiPreCombineField2/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Hudi preCombinedField 总结","date":"2021-07-09T16:00:00.000Z","path":"2021/07/10/hudiPreCombinedField/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Hudi","slug":"Hudi","permalink":"https://dongkelun.com/tags/Hudi/"}]},{"title":"Spark DataFrame 添加列总结","date":"2021-06-15T16:00:00.000Z","path":"2021/06/16/dfAddCols/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"}]},{"title":"通过DBeaver本地访问远程Kerberos环境下的Hive","date":"2021-06-02T16:00:00.000Z","path":"2021/06/03/dbeaverConnectKerberosHive/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Hive","slug":"Hive","permalink":"https://dongkelun.com/tags/Hive/"},{"name":"DBeaver","slug":"DBeaver","permalink":"https://dongkelun.com/tags/DBeaver/"},{"name":"Kerberos","slug":"Kerberos","permalink":"https://dongkelun.com/tags/Kerberos/"}]},{"title":"Spark 本地连接远程服务器上带有kerberos认证的Hive","date":"2021-05-18T16:00:00.000Z","path":"2021/05/19/localSparkHiveWithKerberos/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"hive","slug":"hive","permalink":"https://dongkelun.com/tags/hive/"},{"name":"kerberos","slug":"kerberos","permalink":"https://dongkelun.com/tags/kerberos/"}]},{"title":"Java 连接 Kerberos认证下的Spark Thrift Server/Hive 
Server总结","date":"2021-05-09T16:00:00.000Z","path":"2021/05/10/javaSparkThriftServerWithKerberos/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"hive","slug":"hive","permalink":"https://dongkelun.com/tags/hive/"},{"name":"java","slug":"java","permalink":"https://dongkelun.com/tags/java/"},{"name":"kerberos","slug":"kerberos","permalink":"https://dongkelun.com/tags/kerberos/"}]},{"title":"Spark覆盖写入mysql表但不改变已有的表结构","date":"2021-04-28T16:00:00.000Z","path":"2021/04/29/SparkMysqlOverwriteTruncateTable/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"mysql","slug":"mysql","permalink":"https://dongkelun.com/tags/mysql/"},{"name":"源码","slug":"源码","permalink":"https://dongkelun.com/tags/源码/"}]},{"title":"Spark Sql 执行流程源码阅读笔记","date":"2021-04-22T16:00:00.000Z","path":"2021/04/23/sparkSqlExecutionFlow/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"源码","slug":"源码","permalink":"https://dongkelun.com/tags/源码/"}]},{"title":"Spark Sql 创建 Hive表的压缩格式","date":"2021-03-18T16:00:00.000Z","path":"2021/03/19/SparkCompression/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Hive","slug":"Hive","permalink":"https://dongkelun.com/tags/Hive/"}]},{"title":"Java 连接 Spark Thrift Server/Hive Server总结","date":"2021-02-18T16:00:00.000Z","path":"2021/02/19/javaSparkThriftServer/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"hive","slug":"hive","permalink":"https://dongkelun.com/tags/hive/"},{"name":"java","slug":"java","permalink":"https://dongkelun.com/tags/java/"}]},{"title":"Spark CoarseGrainedExecutorBackend 启动流程","date":"2020-12-25T16:00:00.000Z","path":"2020/12/26/sparkCoarseGrainedExecutorBackendStartedProcess/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"源码","slug":"源码","permalink":"https://dongkelun.com/tags/源码/"}]},{"title":"Spark RPC 学习笔记","date":"2020-12-22T16:00:00.000Z","path":"2020/12/23/sparkRPC/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"源码","slug":"源码","permalink":"https://dongkelun.com/tags/源码/"}]},{"title":"java.lang.UnsatisfiedLinkError:org.apache.hadoop.io.nativeio.NativeIO$Windows.access0(Ljava/lang/String;I)Z","date":"2020-11-16T16:00:00.000Z","path":"2020/11/17/hadoopExcepetion2/","categories":[],"tags":[{"name":"Hadoop","slug":"Hadoop","permalink":"https://dongkelun.com/tags/Hadoop/"},{"name":"ambari","slug":"ambari","permalink":"https://dongkelun.com/tags/ambari/"}]},{"title":"vmware centos7 克隆","date":"2020-10-25T16:00:00.000Z","path":"2020/10/26/vmwareClone/","categories":[],"tags":[{"name":"centos7","slug":"centos7","permalink":"https://dongkelun.com/tags/centos7/"},{"name":"vmware","slug":"vmware","permalink":"https://dongkelun.com/tags/vmware/"}]},{"title":"Spark 3.0.1 Structured Streaming 提交程序异常解决","date":"2020-10-08T16:00:00.000Z","path":"2020/10/09/spark3StreamingException/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"异常","slug":"异常","permalink":"https://dongkelun.com/tags/异常/"}]},{"title":"Java API 连接 
Hbase示例","date":"2020-08-18T16:00:00.000Z","path":"2020/08/19/javaHbase/","categories":[],"tags":[{"name":"hbase","slug":"hbase","permalink":"https://dongkelun.com/tags/hbase/"},{"name":"java","slug":"java","permalink":"https://dongkelun.com/tags/java/"}]},{"title":"centos7 hbase1.4.13+hadoop2.7.1+单机环境搭建","date":"2020-08-13T16:00:00.000Z","path":"2020/08/14/hbaseConf/","categories":[],"tags":[{"name":"centos7","slug":"centos7","permalink":"https://dongkelun.com/tags/centos7/"},{"name":"hbase","slug":"hbase","permalink":"https://dongkelun.com/tags/hbase/"},{"name":"hadoop","slug":"hadoop","permalink":"https://dongkelun.com/tags/hadoop/"}]},{"title":"Spark 判断DataFrame 长度为0","date":"2020-03-13T16:00:00.000Z","path":"2020/03/14/sparkDfLengthIs0/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"DataFrame","slug":"DataFrame","permalink":"https://dongkelun.com/tags/DataFrame/"}]},{"title":"Spark DataFrame isin方法使用","date":"2020-01-20T16:00:00.000Z","path":"2020/01/21/sparkDfIsin/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"DataFrame","slug":"DataFrame","permalink":"https://dongkelun.com/tags/DataFrame/"}]},{"title":"Spark 覆盖写Hive分区表,只覆盖部分对应分区","date":"2020-01-15T16:00:00.000Z","path":"2020/01/16/sparkHivePartitionOverwrite/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Hive","slug":"Hive","permalink":"https://dongkelun.com/tags/Hive/"},{"name":"Partition","slug":"Partition","permalink":"https://dongkelun.com/tags/Partition/"}]},{"title":"Python 连接 MYSQL","date":"2020-01-02T16:00:00.000Z","path":"2020/01/03/pythonMysql/","categories":[],"tags":[{"name":"Python","slug":"Python","permalink":"https://dongkelun.com/tags/Python/"},{"name":"MYSQL","slug":"MYSQL","permalink":"https://dongkelun.com/tags/MYSQL/"}]},{"title":"windows 安装 pycharm 笔记","date":"2020-01-01T16:00:00.000Z","path":"2020/01/02/pycharmInstall/","categories":[],"tags":[{"name":"python","slug":"python","permalink":"https://dongkelun.com/tags/python/"},{"name":"pycharm","slug":"pycharm","permalink":"https://dongkelun.com/tags/pycharm/"}]},{"title":"Python 处理Excel总结(1)","date":"2019-12-29T16:00:00.000Z","path":"2019/12/30/pythonExcel/","categories":[],"tags":[{"name":"python","slug":"python","permalink":"https://dongkelun.com/tags/python/"},{"name":"excel","slug":"excel","permalink":"https://dongkelun.com/tags/excel/"}]},{"title":"python anaconda 安装使用","date":"2019-12-26T16:00:00.000Z","path":"2019/12/27/anacondaInstall/","categories":[],"tags":[{"name":"python","slug":"python","permalink":"https://dongkelun.com/tags/python/"},{"name":"anaconda","slug":"anaconda","permalink":"https://dongkelun.com/tags/anaconda/"}]},{"title":"英语学习","date":"2019-12-23T16:00:00.000Z","path":"2019/12/24/2019-12-24/","categories":[],"tags":[{"name":"english","slug":"english","permalink":"https://dongkelun.com/tags/english/"}]},{"title":"英语学习","date":"2019-12-22T16:00:00.000Z","path":"2019/12/23/2019-12-23/","categories":[],"tags":[{"name":"english","slug":"english","permalink":"https://dongkelun.com/tags/english/"}]},{"title":"Oracle、Spark、Hive SQL 
正则总结","date":"2019-12-01T16:00:00.000Z","path":"2019/12/02/SQLRegExp/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"SQL","slug":"SQL","permalink":"https://dongkelun.com/tags/SQL/"},{"name":"Oracle","slug":"Oracle","permalink":"https://dongkelun.com/tags/Oracle/"},{"name":"Hive","slug":"Hive","permalink":"https://dongkelun.com/tags/Hive/"}]},{"title":"sbt 支持打包Java程序","date":"2019-11-18T16:00:00.000Z","path":"2019/11/19/sbtSupportJava/","categories":[],"tags":[{"name":"sbt","slug":"sbt","permalink":"https://dongkelun.com/tags/sbt/"}]},{"title":"SQL UNION 和 UNION ALL","date":"2019-07-28T16:00:00.000Z","path":"2019/07/29/sqlUnion/","categories":[],"tags":[{"name":"SQL","slug":"SQL","permalink":"https://dongkelun.com/tags/SQL/"},{"name":"UNION","slug":"UNION","permalink":"https://dongkelun.com/tags/UNION/"}]},{"title":"IDEA 新建Maven项目同时支持Java和Scala两种语言","date":"2019-07-15T16:00:00.000Z","path":"2019/07/16/ideaMavenScalaJava/","categories":[],"tags":[{"name":"Scala","slug":"Scala","permalink":"https://dongkelun.com/tags/Scala/"},{"name":"Java","slug":"Java","permalink":"https://dongkelun.com/tags/Java/"},{"name":"IDEA","slug":"IDEA","permalink":"https://dongkelun.com/tags/IDEA/"},{"name":"Maven","slug":"Maven","permalink":"https://dongkelun.com/tags/Maven/"}]},{"title":"Centos7 ELKB 7.2.0版本单机部署","date":"2019-07-02T16:00:00.000Z","path":"2019/07/03/elkbConf/","categories":[],"tags":[{"name":"centos7","slug":"centos7","permalink":"https://dongkelun.com/tags/centos7/"},{"name":"ELKB","slug":"ELKB","permalink":"https://dongkelun.com/tags/ELKB/"}]},{"title":"Kafka学习笔记(1)","date":"2019-06-09T16:00:00.000Z","path":"2019/06/10/kafkaLearing/","categories":[],"tags":[{"name":"Kafka","slug":"Kafka","permalink":"https://dongkelun.com/tags/Kafka/"},{"name":"centos7","slug":"centos7","permalink":"https://dongkelun.com/tags/centos7/"}]},{"title":"Linux 定时任务命令crontab学习总结","date":"2019-06-05T16:00:00.000Z","path":"2019/06/06/linuxCrontab/","categories":[],"tags":[{"name":"Linux","slug":"Linux","permalink":"https://dongkelun.com/tags/Linux/"},{"name":"crontab","slug":"crontab","permalink":"https://dongkelun.com/tags/crontab/"}]},{"title":"Sealed classes","date":"2019-06-04T16:00:00.000Z","path":"2019/06/05/sealedClasses/","categories":[],"tags":[{"name":"scala","slug":"scala","permalink":"https://dongkelun.com/tags/scala/"}]},{"title":"将Vue项目部署到Github Page上","date":"2019-06-03T16:00:00.000Z","path":"2019/06/04/vueDeployGithubPages/","categories":[],"tags":[{"name":"vue","slug":"vue","permalink":"https://dongkelun.com/tags/vue/"},{"name":"git","slug":"git","permalink":"https://dongkelun.com/tags/git/"},{"name":"git-page","slug":"git-page","permalink":"https://dongkelun.com/tags/git-page/"}]},{"title":"Spark ML LR 用 setWeightCol 解决数据不平衡","date":"2019-06-02T16:00:00.000Z","path":"2019/06/03/use-setWeightCol-dealing-with-unbalanced-datasets-in-spark-ml/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"算法","slug":"算法","permalink":"https://dongkelun.com/tags/算法/"},{"name":"ml","slug":"ml","permalink":"https://dongkelun.com/tags/ml/"}]},{"title":"英语学习","date":"2019-05-30T16:00:00.000Z","path":"2019/05/31/2019-05-31/","categories":[],"tags":[{"name":"english","slug":"english","permalink":"https://dongkelun.com/tags/english/"}]},{"title":"Spark读取CSV异常 
java.lang.ArrayIndexOutOfBoundsException:62","date":"2019-05-29T16:00:00.000Z","path":"2019/05/30/sparkCsvException/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"异常","slug":"异常","permalink":"https://dongkelun.com/tags/异常/"}]},{"title":"Oracle和MySQL如何判断是否为空或NULL","date":"2019-05-28T16:00:00.000Z","path":"2019/05/29/oracleAndMysqlNull/","categories":[],"tags":[{"name":"Oracle","slug":"Oracle","permalink":"https://dongkelun.com/tags/Oracle/"},{"name":"sql","slug":"sql","permalink":"https://dongkelun.com/tags/sql/"},{"name":"MySQL","slug":"MySQL","permalink":"https://dongkelun.com/tags/MySQL/"}]},{"title":"MySQL和Oracle字符串截取函数用法总结(比较)","date":"2019-05-27T16:00:00.000Z","path":"2019/05/28/mysqlAndOracleSubString/","categories":[],"tags":[{"name":"Oracle","slug":"Oracle","permalink":"https://dongkelun.com/tags/Oracle/"},{"name":"sql","slug":"sql","permalink":"https://dongkelun.com/tags/sql/"},{"name":"MySQL","slug":"MySQL","permalink":"https://dongkelun.com/tags/MySQL/"}]},{"title":"Logistic Regression 学习总结","date":"2019-05-26T16:00:00.000Z","path":"2019/05/27/LR1/","categories":[],"tags":[{"name":"算法","slug":"算法","permalink":"https://dongkelun.com/tags/算法/"},{"name":"Logistic Regression","slug":"Logistic-Regression","permalink":"https://dongkelun.com/tags/Logistic-Regression/"}]},{"title":"Nginx 安装配置","date":"2019-04-23T16:00:00.000Z","path":"2019/04/24/nginxInstallConf/","categories":[],"tags":[{"name":"Nginx","slug":"Nginx","permalink":"https://dongkelun.com/tags/Nginx/"}]},{"title":"Vue 自动获取本地ip,并打开浏览器","date":"2019-03-27T16:00:00.000Z","path":"2019/03/28/vueAutoIpAndOpenBrowser/","categories":[],"tags":[{"name":"Vue","slug":"Vue","permalink":"https://dongkelun.com/tags/Vue/"}]},{"title":"Vue版本Echarts中国地图三级钻取及Vue踩坑笔记","date":"2019-02-17T16:00:00.000Z","path":"2019/02/18/vueEchartsMap/","categories":[],"tags":[{"name":"Echarts","slug":"Echarts","permalink":"https://dongkelun.com/tags/Echarts/"},{"name":"front-end","slug":"front-end","permalink":"https://dongkelun.com/tags/front-end/"},{"name":"Vue","slug":"Vue","permalink":"https://dongkelun.com/tags/Vue/"}]},{"title":"Spark 异常总结及解决办法","date":"2019-01-08T16:00:00.000Z","path":"2019/01/09/sparkExceptions/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"}]},{"title":"Spark Checkpoint 使用及源码浅析","date":"2019-01-06T16:00:00.000Z","path":"2019/01/07/sparkCheckPoint/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"持久化","slug":"持久化","permalink":"https://dongkelun.com/tags/持久化/"},{"name":"cache","slug":"cache","permalink":"https://dongkelun.com/tags/cache/"},{"name":"persist","slug":"persist","permalink":"https://dongkelun.com/tags/persist/"}]},{"title":"Linux 安装 oh-my-zsh","date":"2018-12-28T16:00:00.000Z","path":"2018/12/29/linux-oh-my-zsh/","categories":[],"tags":[{"name":"Linux","slug":"Linux","permalink":"https://dongkelun.com/tags/Linux/"},{"name":"zsh","slug":"zsh","permalink":"https://dongkelun.com/tags/zsh/"}]},{"title":"Spark SQL 
优化笔记","date":"2018-12-25T16:00:00.000Z","path":"2018/12/26/sparkSqlOptimize/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"SQL","slug":"SQL","permalink":"https://dongkelun.com/tags/SQL/"}]},{"title":"Scala学习笔记","date":"2018-12-13T16:00:00.000Z","path":"2018/12/14/scalaLearningNotes/","categories":[],"tags":[{"name":"Scala","slug":"Scala","permalink":"https://dongkelun.com/tags/Scala/"}]},{"title":"Hive分桶表学习总结","date":"2018-12-06T16:00:00.000Z","path":"2018/12/07/hiveBucketTable/","categories":[],"tags":[{"name":"Hive","slug":"Hive","permalink":"https://dongkelun.com/tags/Hive/"},{"name":"bucket","slug":"bucket","permalink":"https://dongkelun.com/tags/bucket/"}]},{"title":"Spark操作Hive分区表","date":"2018-12-03T16:00:00.000Z","path":"2018/12/04/sparkHivePatition/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Hive","slug":"Hive","permalink":"https://dongkelun.com/tags/Hive/"},{"name":"Partition","slug":"Partition","permalink":"https://dongkelun.com/tags/Partition/"}]},{"title":"Hive内部表和外部表","date":"2018-12-02T16:00:00.000Z","path":"2018/12/03/hiveInternalAndExternalTables/","categories":[],"tags":[{"name":"Hive","slug":"Hive","permalink":"https://dongkelun.com/tags/Hive/"}]},{"title":"Hive分区表学习总结","date":"2018-12-01T16:00:00.000Z","path":"2018/12/02/hivePartitionTable/","categories":[],"tags":[{"name":"Hive","slug":"Hive","permalink":"https://dongkelun.com/tags/Hive/"},{"name":"Partition","slug":"Partition","permalink":"https://dongkelun.com/tags/Partition/"}]},{"title":"Echarts中国地图三级钻取","date":"2018-11-26T16:00:00.000Z","path":"2018/11/27/echartsChinaMap/","categories":[],"tags":[{"name":"Echarts","slug":"Echarts","permalink":"https://dongkelun.com/tags/Echarts/"},{"name":"front-end","slug":"front-end","permalink":"https://dongkelun.com/tags/front-end/"}]},{"title":"打印(获取)HDFS路径下所有的文件名(包括子目录下的)","date":"2018-11-19T16:00:00.000Z","path":"2018/11/20/getAllHDFSFileNames/","categories":[],"tags":[{"name":"Scala","slug":"Scala","permalink":"https://dongkelun.com/tags/Scala/"},{"name":"HDFS","slug":"HDFS","permalink":"https://dongkelun.com/tags/HDFS/"}]},{"title":"通过Vue CLI 快速创建Vue项目并部署到tomcat","date":"2018-11-18T16:00:00.000Z","path":"2018/11/19/vueCliCreateProject/","categories":[],"tags":[{"name":"Vue","slug":"Vue","permalink":"https://dongkelun.com/tags/Vue/"},{"name":"Vue CLI","slug":"Vue-CLI","permalink":"https://dongkelun.com/tags/Vue-CLI/"}]},{"title":"Spark 通过 spark-submit 设置日志级别","date":"2018-11-15T16:00:00.000Z","path":"2018/11/16/sparkSubmitLogLevel/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"spark-submit","slug":"spark-submit","permalink":"https://dongkelun.com/tags/spark-submit/"}]},{"title":"Centos7 Tomcat9 
安装笔记","date":"2018-10-22T16:00:00.000Z","path":"2018/10/23/tomcatConf/","categories":[],"tags":[{"name":"tomcat","slug":"tomcat","permalink":"https://dongkelun.com/tags/tomcat/"}]},{"title":"Spark性能优化:基于分区进行操作","date":"2018-09-01T16:00:00.000Z","path":"2018/09/02/sparkMapPartitions/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Partitions","slug":"Partitions","permalink":"https://dongkelun.com/tags/Partitions/"},{"name":"性能优化","slug":"性能优化","permalink":"https://dongkelun.com/tags/性能优化/"}]},{"title":"利用Spark实现Oracle到Hive的历史数据同步","date":"2018-08-26T16:00:00.000Z","path":"2018/08/27/sparkOracle2Hive/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Oracle","slug":"Oracle","permalink":"https://dongkelun.com/tags/Oracle/"},{"name":"Hive","slug":"Hive","permalink":"https://dongkelun.com/tags/Hive/"},{"name":"DataFrame","slug":"DataFrame","permalink":"https://dongkelun.com/tags/DataFrame/"}]},{"title":"Spark通过修改DataFrame的schema给表字段添加注释","date":"2018-08-19T16:00:00.000Z","path":"2018/08/20/sparkDfAddComments/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"DataFrame","slug":"DataFrame","permalink":"https://dongkelun.com/tags/DataFrame/"}]},{"title":"Spark创建空的DataFrame","date":"2018-08-13T16:00:00.000Z","path":"2018/08/14/sparkEmptyDataFrame/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"DataFrame","slug":"DataFrame","permalink":"https://dongkelun.com/tags/DataFrame/"}]},{"title":"Spark 创建RDD、DataFrame各种情况的默认分区数","date":"2018-08-12T16:00:00.000Z","path":"2018/08/13/sparkDefaultPartitionNums/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"DataFrame","slug":"DataFrame","permalink":"https://dongkelun.com/tags/DataFrame/"},{"name":"Partition","slug":"Partition","permalink":"https://dongkelun.com/tags/Partition/"},{"name":"Rdd","slug":"Rdd","permalink":"https://dongkelun.com/tags/Rdd/"}]},{"title":"Spark UDF使用详解及代码示例","date":"2018-08-01T16:00:00.000Z","path":"2018/08/02/sparkUDF/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"DataFrame","slug":"DataFrame","permalink":"https://dongkelun.com/tags/DataFrame/"},{"name":"UDF","slug":"UDF","permalink":"https://dongkelun.com/tags/UDF/"}]},{"title":"通过数据库客户端界面工具DBeaver连接Hive","date":"2018-07-12T16:00:00.000Z","path":"2018/07/13/dbeaverConnectHive/","categories":[],"tags":[{"name":"界面工具","slug":"界面工具","permalink":"https://dongkelun.com/tags/界面工具/"},{"name":"hive","slug":"hive","permalink":"https://dongkelun.com/tags/hive/"}]},{"title":"HDFS DataNode启动异常:/opt/jdk1.8.0_151/bin/java:权限不够","date":"2018-07-09T16:00:00.000Z","path":"2018/07/10/HadoopException/","categories":[],"tags":[{"name":"Hadoop","slug":"Hadoop","permalink":"https://dongkelun.com/tags/Hadoop/"},{"name":"ambari","slug":"ambari","permalink":"https://dongkelun.com/tags/ambari/"}]},{"title":"ambari 异常总结及解决办法","date":"2018-07-09T16:00:00.000Z","path":"2018/07/10/ambariExceptions/","categories":[],"tags":[{"name":"ambari","slug":"ambari","permalink":"https://dongkelun.com/tags/ambari/"},{"name":"centos7","slug":"centos7","permalink":"https://dongkelun.com/tags/centos7/"}]},{"title":"spark-submit报错:Application application_1529650293575_0148 finished with failed 
status","date":"2018-07-05T16:00:00.000Z","path":"2018/07/06/sparkSubmitException1/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"spark-submit","slug":"spark-submit","permalink":"https://dongkelun.com/tags/spark-submit/"}]},{"title":"Spark DataFrame按某列降序排序","date":"2018-07-03T16:00:00.000Z","path":"2018/07/04/sparkDfSortDesc/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"DataFrame","slug":"DataFrame","permalink":"https://dongkelun.com/tags/DataFrame/"}]},{"title":"Spark获取当前分区的partitionId","date":"2018-06-27T16:00:00.000Z","path":"2018/06/28/sparkGetPartitionId/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Partition","slug":"Partition","permalink":"https://dongkelun.com/tags/Partition/"}]},{"title":"SparkStreaming+Kafka 实现统计基于缓存的实时uv","date":"2018-06-24T16:00:00.000Z","path":"2018/06/25/KafkaUV/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"SparkStreaming","slug":"SparkStreaming","permalink":"https://dongkelun.com/tags/SparkStreaming/"},{"name":"Kafka","slug":"Kafka","permalink":"https://dongkelun.com/tags/Kafka/"}]},{"title":"通过offsets.retention.minutes设置kafka offset的过期时间","date":"2018-06-20T16:00:00.000Z","path":"2018/06/21/modifyKafkaOffsetTime/","categories":[],"tags":[{"name":"Kafka","slug":"Kafka","permalink":"https://dongkelun.com/tags/Kafka/"}]},{"title":"Spark Streaming+Kafka提交offset实现有且仅有一次(exactly-once)","date":"2018-06-19T16:00:00.000Z","path":"2018/06/20/sparkStreamingOffsetOnlyOnce/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Kafka","slug":"Kafka","permalink":"https://dongkelun.com/tags/Kafka/"},{"name":"Spark Streaming","slug":"Spark-Streaming","permalink":"https://dongkelun.com/tags/Spark-Streaming/"}]},{"title":"spark-submit提交Spark Streaming+Kafka程序","date":"2018-06-18T16:00:00.000Z","path":"2018/06/19/sparkSubmitKafka/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Kafka","slug":"Kafka","permalink":"https://dongkelun.com/tags/Kafka/"},{"name":"Spark Streaming","slug":"Spark-Streaming","permalink":"https://dongkelun.com/tags/Spark-Streaming/"},{"name":"spark-submit","slug":"spark-submit","permalink":"https://dongkelun.com/tags/spark-submit/"}]},{"title":"SparkStreaming+Kafka 实现基于缓存的实时wordcount程序","date":"2018-06-13T16:00:00.000Z","path":"2018/06/14/updateStateBykeyWordCount/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"SparkStreaming","slug":"SparkStreaming","permalink":"https://dongkelun.com/tags/SparkStreaming/"},{"name":"Kafka","slug":"Kafka","permalink":"https://dongkelun.com/tags/Kafka/"}]},{"title":"Spark架构原理","date":"2018-06-08T16:00:00.000Z","path":"2018/06/09/sparkArchitecturePrinciples/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"原理","slug":"原理","permalink":"https://dongkelun.com/tags/原理/"}]},{"title":"Spark 
持久化(cache和persist的区别)","date":"2018-06-02T16:00:00.000Z","path":"2018/06/03/sparkCacheAndPersist/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"持久化","slug":"持久化","permalink":"https://dongkelun.com/tags/持久化/"},{"name":"cache","slug":"cache","permalink":"https://dongkelun.com/tags/cache/"},{"name":"persist","slug":"persist","permalink":"https://dongkelun.com/tags/persist/"}]},{"title":"Scala日期操作","date":"2018-05-31T16:00:00.000Z","path":"2018/06/01/scalaDate/","categories":[],"tags":[{"name":"scala","slug":"scala","permalink":"https://dongkelun.com/tags/scala/"},{"name":"日期","slug":"日期","permalink":"https://dongkelun.com/tags/日期/"}]},{"title":"Spark读取压缩文件","date":"2018-05-29T16:00:00.000Z","path":"2018/05/30/sparkGZ/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"压缩文件","slug":"压缩文件","permalink":"https://dongkelun.com/tags/压缩文件/"},{"name":"编码问题","slug":"编码问题","permalink":"https://dongkelun.com/tags/编码问题/"}]},{"title":"如何解决Spark开发中遇到需要去掉文件前几行数据的问题","date":"2018-05-26T16:00:00.000Z","path":"2018/05/27/sparkDelFirstNLines/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"rdd","slug":"rdd","permalink":"https://dongkelun.com/tags/rdd/"}]},{"title":"利用ogg实现oracle到kafka的增量数据实时同步","date":"2018-05-22T16:00:00.000Z","path":"2018/05/23/oggOracle2Kafka/","categories":[],"tags":[{"name":"Kafka","slug":"Kafka","permalink":"https://dongkelun.com/tags/Kafka/"},{"name":"ogg","slug":"ogg","permalink":"https://dongkelun.com/tags/ogg/"}]},{"title":"Kafka安装启动入门教程","date":"2018-05-20T16:00:00.000Z","path":"2018/05/21/kafkaConf/","categories":[],"tags":[{"name":"Kafka","slug":"Kafka","permalink":"https://dongkelun.com/tags/Kafka/"},{"name":"centos7","slug":"centos7","permalink":"https://dongkelun.com/tags/centos7/"}]},{"title":"hive查询报错:java.io.IOException:org.apache.parquet.io.ParquetDecodingException","date":"2018-05-19T16:00:00.000Z","path":"2018/05/20/hiveQueryException/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"hive","slug":"hive","permalink":"https://dongkelun.com/tags/hive/"},{"name":"异常解决","slug":"异常解决","permalink":"https://dongkelun.com/tags/异常解决/"}]},{"title":"Spark Streaming连接Kafka入门教程","date":"2018-05-16T16:00:00.000Z","path":"2018/05/17/sparkKafka/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Kafka","slug":"Kafka","permalink":"https://dongkelun.com/tags/Kafka/"},{"name":"Spark Streaming","slug":"Spark-Streaming","permalink":"https://dongkelun.com/tags/Spark-Streaming/"}]},{"title":"spark ML之特征处理(1)","date":"2018-05-16T16:00:00.000Z","path":"2018/05/17/sparkMlFeatureProcessing1/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"ml","slug":"ml","permalink":"https://dongkelun.com/tags/ml/"}]},{"title":"旧版Spark(1.6版本) 将RDD动态转为DataFrame","date":"2018-05-10T16:00:00.000Z","path":"2018/05/11/rdd2df/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"DataFrame","slug":"DataFrame","permalink":"https://dongkelun.com/tags/DataFrame/"},{"name":"Rdd","slug":"Rdd","permalink":"https://dongkelun.com/tags/Rdd/"}]},{"title":"spark-submit报错:Exception in thread \"main\" java.sql.SQLException:No suitable 
driver","date":"2018-05-05T16:00:00.000Z","path":"2018/05/06/sparkSubmitException/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"spark-submit","slug":"spark-submit","permalink":"https://dongkelun.com/tags/spark-submit/"}]},{"title":"Centos7 安装oracle11","date":"2018-05-04T16:00:00.000Z","path":"2018/05/05/oracleConf/","categories":[],"tags":[{"name":"centos7","slug":"centos7","permalink":"https://dongkelun.com/tags/centos7/"},{"name":"oracle","slug":"oracle","permalink":"https://dongkelun.com/tags/oracle/"}]},{"title":"Spark 将DataFrame所有的列类型改为double","date":"2018-04-26T16:00:00.000Z","path":"2018/04/27/dfChangeAllColDatatypes/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"DataFrame","slug":"DataFrame","permalink":"https://dongkelun.com/tags/DataFrame/"}]},{"title":"centos7 ambari2.6.1.5+hdp2.6.4.0 大数据集群安装部署","date":"2018-04-24T16:00:00.000Z","path":"2018/04/25/ambariConf/","categories":[],"tags":[{"name":"ambari","slug":"ambari","permalink":"https://dongkelun.com/tags/ambari/"},{"name":"centos7","slug":"centos7","permalink":"https://dongkelun.com/tags/centos7/"}]},{"title":"spark on yarn 配置及异常解决","date":"2018-04-15T16:00:00.000Z","path":"2018/04/16/sparkOnYarnConf/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"yarn","slug":"yarn","permalink":"https://dongkelun.com/tags/yarn/"}]},{"title":"Spark 统计每天新增用户数","date":"2018-04-10T16:00:00.000Z","path":"2018/04/11/sparkNewUV/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Scala","slug":"Scala","permalink":"https://dongkelun.com/tags/Scala/"},{"name":"面试题","slug":"面试题","permalink":"https://dongkelun.com/tags/面试题/"}]},{"title":"spark ML算法之线性回归使用","date":"2018-04-08T16:00:00.000Z","path":"2018/04/09/sparkMlLinearRegressionUsing/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"算法","slug":"算法","permalink":"https://dongkelun.com/tags/算法/"},{"name":"ml","slug":"ml","permalink":"https://dongkelun.com/tags/ml/"}]},{"title":"CentOS 初始环境配置","date":"2018-04-04T16:00:00.000Z","path":"2018/04/05/centosInitialConf/","categories":[],"tags":[{"name":"centos","slug":"centos","permalink":"https://dongkelun.com/tags/centos/"}]},{"title":"centos7 hadoop 集群安装配置","date":"2018-04-04T16:00:00.000Z","path":"2018/04/05/hadoopClusterConf/","categories":[],"tags":[{"name":"Hadoop","slug":"Hadoop","permalink":"https://dongkelun.com/tags/Hadoop/"},{"name":"centos","slug":"centos","permalink":"https://dongkelun.com/tags/centos/"}]},{"title":"linux ssh 免密登录","date":"2018-04-04T16:00:00.000Z","path":"2018/04/05/sshConf/","categories":[],"tags":[{"name":"Linux","slug":"Linux","permalink":"https://dongkelun.com/tags/Linux/"}]},{"title":"scala 两个map合并,key相同时value相加","date":"2018-03-31T16:00:00.000Z","path":"2018/04/01/scalaMapAdd/","categories":[],"tags":[{"name":"scala","slug":"scala","permalink":"https://dongkelun.com/tags/scala/"}]},{"title":"scala 
下划线使用指南","date":"2018-03-28T16:00:00.000Z","path":"2018/03/29/scalaUnderscoreUseGuide/","categories":[],"tags":[{"name":"scala","slug":"scala","permalink":"https://dongkelun.com/tags/scala/"},{"name":"转载","slug":"转载","permalink":"https://dongkelun.com/tags/转载/"}]},{"title":"Spark连接Hive(spark-shell和Eclipse两种方式)","date":"2018-03-24T16:00:00.000Z","path":"2018/03/25/sparkHive/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"hive","slug":"hive","permalink":"https://dongkelun.com/tags/hive/"}]},{"title":"centos7 hive 单机模式安装配置","date":"2018-03-23T16:00:00.000Z","path":"2018/03/24/hiveConf/","categories":[],"tags":[{"name":"centos","slug":"centos","permalink":"https://dongkelun.com/tags/centos/"},{"name":"hive","slug":"hive","permalink":"https://dongkelun.com/tags/hive/"}]},{"title":"centos7 hadoop 单机模式安装配置","date":"2018-03-22T16:00:00.000Z","path":"2018/03/23/hadoopConf/","categories":[],"tags":[{"name":"Hadoop","slug":"Hadoop","permalink":"https://dongkelun.com/tags/Hadoop/"},{"name":"centos","slug":"centos","permalink":"https://dongkelun.com/tags/centos/"}]},{"title":"连接mysql报错:Exception in thread \"main\" java.sql.SQLException:The server time zone value 'EDT' is unrecognized or represents more than one time zone","date":"2018-03-21T16:00:00.000Z","path":"2018/03/22/mysqlTimeZoneErr/","categories":[],"tags":[{"name":"mysql","slug":"mysql","permalink":"https://dongkelun.com/tags/mysql/"}]},{"title":"Spark Sql 连接mysql","date":"2018-03-20T16:00:00.000Z","path":"2018/03/21/sparkMysql/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Scala","slug":"Scala","permalink":"https://dongkelun.com/tags/Scala/"}]},{"title":"win10 spark+scala+eclipse+sbt 安装配置","date":"2018-03-14T16:00:00.000Z","path":"2018/03/15/winSparkConf/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"},{"name":"Scala","slug":"Scala","permalink":"https://dongkelun.com/tags/Scala/"}]},{"title":"Spark基本概念(便于自己随时查阅--摘自Spark快速大数据分析)","date":"2018-01-22T16:00:00.000Z","path":"2018/01/23/sparkBasicConcept/","categories":[],"tags":[{"name":"Spark","slug":"Spark","permalink":"https://dongkelun.com/tags/Spark/"}]},{"title":"ibatis 事务 java","date":"2018-01-21T16:00:00.000Z","path":"2018/01/22/ibatisTransaction/","categories":[],"tags":[{"name":"java","slug":"java","permalink":"https://dongkelun.com/tags/java/"},{"name":"ibatis","slug":"ibatis","permalink":"https://dongkelun.com/tags/ibatis/"}]},{"title":"network is unreachable centos无法连接外网(或unknown host baidu.com)","date":"2018-01-16T16:00:00.000Z","path":"2018/01/17/networkIsUnreachable/","categories":[],"tags":[{"name":"centos","slug":"centos","permalink":"https://dongkelun.com/tags/centos/"}]},{"title":"vmware centos7 设置固定ip","date":"2018-01-15T16:00:00.000Z","path":"2018/01/16/vmwareSetFixIP/","categories":[],"tags":[{"name":"vmware","slug":"vmware","permalink":"https://dongkelun.com/tags/vmware/"}]},{"title":"Redis Cluster 安装配置","date":"2018-01-08T16:00:00.000Z","path":"2018/01/09/redisClusterDeployment/","categories":[],"tags":[{"name":"redis","slug":"redis","permalink":"https://dongkelun.com/tags/redis/"}]}]
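For reference, every entry in the array above shares the same record shape. Below is a minimal TypeScript sketch of that structure, inferred only from the fields visible in the file; the interface and type names (`PostTag`, `PostEntry`, `Content`) are illustrative and not defined anywhere in the file itself.

```typescript
// Illustrative types inferred from the entries in content.json.
// The names PostTag, PostEntry, and Content are assumptions for clarity.
interface PostTag {
  name: string;      // display name, e.g. "Spark"
  slug: string;      // URL-safe form, e.g. "Spark"
  permalink: string; // e.g. "https://dongkelun.com/tags/Spark/"
}

interface PostEntry {
  title: string;         // post title
  date: string;          // ISO 8601 timestamp, e.g. "2025-01-12T16:00:00.000Z"
  path: string;          // relative URL path, e.g. "2025/01/13/spark/sparkRpcLearning/"
  categories: unknown[]; // empty ([]) in every entry shown above
  tags: PostTag[];       // zero or more tag objects
}

// The file as a whole is a single JSON array of such entries.
type Content = PostEntry[];
```

A consumer (for example, a site-wide search or archive widget) would typically fetch this file and filter or group the `PostEntry` records by `tags` or `date`; the exact consumer is not shown in the file and is assumed here only for context.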