
Moved several utility classes to new packages and added database ingestion for the 河北客户体验管理智能定责投诉明细月累计接口日 (Hebei customer experience management intelligent responsibility-assignment complaint details, monthly cumulative, daily interface) data.

lifuquan, 1 year ago
Parent
Commit
da9a3b4d38
42 files changed, with 1402 additions and 231 deletions
  1. doc/开发环境/SQL语句备份/tsl_data-bk-20231225.sql (+925 -0)
  2. doc/数据订阅/能力商店/河北客户体验管理智能定责投诉明细月累计接口日/complaint_details_fix_ywd_day表相关语句.md (+131 -0)
  3. doc/数据订阅/能力商店/河北客户体验管理智能定责投诉明细月累计接口日/河北客户体验管理智能定责投诉明细月累计接口日.xlsx (binary)
  4. doc/立项材料/钉钉群报表自动化2024年适配需求-20231225/[河北联通]关于投诉工单日报表2024年适配的需求.docx (binary)
  5. doc/部署环境/用户更新.md (+34 -0)
  6. doc/需求文档/20231219-新需求.md (+0 -39)
  7. doc/需求文档/2024年适配/2024年适配-工作量.xlsx (binary)
  8. doc/需求文档/2024年适配/关于投诉工单日报表2024年适配的需求.md (+55 -0)
  9. src/main/java/com/nokia/tsl_data/config/RequestLogConfig.java (+3 -2)
  10. src/main/java/com/nokia/tsl_data/controller/DataWarehouseController.java (+1 -1)
  11. src/main/java/com/nokia/tsl_data/controller/ReportGenerateController.java (+1 -1)
  12. src/main/java/com/nokia/tsl_data/dao/TslDataDao.java (+83 -35)
  13. src/main/java/com/nokia/tsl_data/entity/TaskRecord.java (+2 -1)
  14. src/main/java/com/nokia/tsl_data/entity/converter/JSONObjectConverter.java (+1 -1)
  15. src/main/java/com/nokia/tsl_data/entity/vo/R.java (+1 -1)
  16. src/main/java/com/nokia/tsl_data/properties/DataWarehouseProperties.java (+7 -0)
  17. src/main/java/com/nokia/tsl_data/scheduling/controller/RegisteredTaskController.java (+1 -1)
  18. src/main/java/com/nokia/tsl_data/scheduling/controller/ScheduledTaskController.java (+1 -1)
  19. src/main/java/com/nokia/tsl_data/scheduling/service/RegisteredTaskService.java (+1 -1)
  20. src/main/java/com/nokia/tsl_data/scheduling/service/SchedulingService.java (+1 -1)
  21. src/main/java/com/nokia/tsl_data/service/DataWarehouseService.java (+98 -39)
  22. src/main/java/com/nokia/tsl_data/service/TaskService.java (+2 -2)
  23. src/main/java/com/nokia/tsl_data/service/TslDataService.java (+1 -0)
  24. src/main/java/com/nokia/tsl_data/service/TslReportService.java (+10 -7)
  25. src/main/java/com/nokia/tsl_data/util/CodecUtil.java (+13 -30)
  26. src/main/java/com/nokia/tsl_data/util/DateUtil.java (+1 -1)
  27. src/main/java/com/nokia/tsl_data/util/InstantUtil.java (+1 -1)
  28. src/main/java/com/nokia/tsl_data/util/SnowFlakeUtil.java (+1 -1)
  29. src/main/java/com/nokia/tsl_data/util/TextUtil.java (+1 -1)
  30. src/main/java/com/nokia/tsl_data/util/excel/entity/AlignmentEnum.java (+1 -1)
  31. src/main/java/com/nokia/tsl_data/util/excel/entity/CellInfo.java (+1 -1)
  32. src/main/java/com/nokia/tsl_data/util/excel/entity/CellRect.java (+1 -1)
  33. src/main/java/com/nokia/tsl_data/util/excel/entity/Gradient.java (+1 -1)
  34. src/main/java/com/nokia/tsl_data/util/excel/entity/ThreeColorGradient.java (+1 -1)
  35. src/main/java/com/nokia/tsl_data/util/excel/entity/TwoColorGradient.java (+1 -1)
  36. src/main/java/com/nokia/tsl_data/util/excel/poi/PoiUtil.java (+6 -5)
  37. src/main/java/com/nokia/tsl_data/util/logging/RequestLogDispatcherServlet.java (+4 -3)
  38. src/main/java/com/nokia/tsl_data/util/logging/RequestLogHandlerInterceptor.java (+3 -2)
  39. src/main/java/com/nokia/tsl_data/util/logging/entity/RepeatableHttpServletRequestWrapper.java (+1 -1)
  40. src/main/java/com/nokia/tsl_data/util/logging/entity/RepeatableHttpServletResponseWrapper.java (+1 -1)
  41. src/main/resources/application.yml (+2 -2)
  42. src/test/java/com/nokia/tsl_data/TslDataApplicationTest.java (+3 -44)

+ 925 - 0
doc/开发环境/SQL语句备份/tsl_data-bk-20231225.sql

@@ -0,0 +1,925 @@
+--
+-- PostgreSQL database dump
+--
+
+-- Dumped from database version 12.10
+-- Dumped by pg_dump version 12.10
+
+SET statement_timeout = 0;
+SET lock_timeout = 0;
+SET idle_in_transaction_session_timeout = 0;
+SET client_encoding = 'UTF8';
+SET standard_conforming_strings = on;
+SELECT pg_catalog.set_config('search_path', '', false);
+SET check_function_bodies = false;
+SET xmloption = content;
+SET client_min_messages = warning;
+SET row_security = off;
+
+--
+-- Name: tsl_data; Type: SCHEMA; Schema: -; Owner: postgres
+--
+
+CREATE SCHEMA tsl_data;
+
+
+ALTER SCHEMA tsl_data OWNER TO postgres;
+
+SET default_tablespace = '';
+
+SET default_table_access_method = heap;
+
+--
+-- Name: avg_duration; Type: TABLE; Schema: tsl_data; Owner: postgres
+--
+
+CREATE TABLE tsl_data.avg_duration (
+    id bigint NOT NULL,
+    month_id character varying(6),
+    city_name character varying(10),
+    avg_duration double precision NOT NULL,
+    cteate_time timestamp without time zone
+);
+
+
+ALTER TABLE tsl_data.avg_duration OWNER TO postgres;
+
+--
+-- Name: TABLE avg_duration; Type: COMMENT; Schema: tsl_data; Owner: postgres
+--
+
+COMMENT ON TABLE tsl_data.avg_duration IS '地市_月_平均处理时长';
+
+
+--
+-- Name: avg_duration_id_seq; Type: SEQUENCE; Schema: tsl_data; Owner: postgres
+--
+
+CREATE SEQUENCE tsl_data.avg_duration_id_seq
+    START WITH 1
+    INCREMENT BY 1
+    NO MINVALUE
+    NO MAXVALUE
+    CACHE 1;
+
+
+ALTER TABLE tsl_data.avg_duration_id_seq OWNER TO postgres;
+
+--
+-- Name: avg_duration_id_seq; Type: SEQUENCE OWNED BY; Schema: tsl_data; Owner: postgres
+--
+
+ALTER SEQUENCE tsl_data.avg_duration_id_seq OWNED BY tsl_data.avg_duration.id;
+
+
+--
+-- Name: cron_task_record; Type: TABLE; Schema: tsl_data; Owner: postgres
+--
+
+CREATE TABLE tsl_data.cron_task_record (
+    id bigint NOT NULL,
+    create_date timestamp without time zone,
+    end_status character varying(255),
+    end_time timestamp without time zone,
+    last_update_date timestamp without time zone,
+    start_time timestamp without time zone,
+    task_info text,
+    task_name character varying(255),
+    time_cost bigint
+);
+
+
+ALTER TABLE tsl_data.cron_task_record OWNER TO postgres;
+
+--
+-- Name: cron_task_record_id_seq; Type: SEQUENCE; Schema: tsl_data; Owner: postgres
+--
+
+ALTER TABLE tsl_data.cron_task_record ALTER COLUMN id ADD GENERATED BY DEFAULT AS IDENTITY (
+    SEQUENCE NAME tsl_data.cron_task_record_id_seq
+    START WITH 1
+    INCREMENT BY 1
+    NO MINVALUE
+    NO MAXVALUE
+    CACHE 1
+);
+
+
+--
+-- Name: high_quality_count_day; Type: TABLE; Schema: tsl_data; Owner: postgres
+--
+
+CREATE TABLE tsl_data.high_quality_count_day (
+    id bigint NOT NULL,
+    month_id character varying(6) NOT NULL,
+    day_id character varying(2) NOT NULL,
+    acct_date character varying(30) NOT NULL,
+    businoareaname character varying(15) NOT NULL,
+    profes_dep character varying(15) NOT NULL,
+    big_type_name character varying(60) NOT NULL,
+    small_type_name character varying(120) NOT NULL,
+    total_complaints character varying(30),
+    hotline_complaints character varying(30),
+    other_complaint character varying(30),
+    litigation_volume character varying(30),
+    satisfaction_rate character varying(30),
+    satisfaction_count character varying(30),
+    total_evaluation character varying(30),
+    complaint_satisfied character varying(30),
+    complaint_satisfied_list character varying(30),
+    complaint_satisfied_count character varying(30),
+    complaint_resolution character varying(30),
+    complaint_resolution_list character varying(30),
+    complaint_resolution_count character varying(30),
+    complaint_response character varying(30),
+    complaint_response_list character varying(30),
+    complaint_response_count character varying(30),
+    complaint character varying(30),
+    fault_satisfaction_rate character varying(30),
+    fault_satisfaction_list character varying(30),
+    fault_satisfaction_count character varying(30),
+    fault_resolution_rate character varying(30),
+    fault_resolution_list character varying(30),
+    fault_resolution_count character varying(30),
+    fault_response_rate character varying(30),
+    fault_response_list character varying(30),
+    fault_response_count character varying(30),
+    cteate_time timestamp without time zone DEFAULT now()
+);
+
+
+ALTER TABLE tsl_data.high_quality_count_day OWNER TO postgres;
+
+--
+-- Name: high_quality_count_day_id_seq; Type: SEQUENCE; Schema: tsl_data; Owner: postgres
+--
+
+CREATE SEQUENCE tsl_data.high_quality_count_day_id_seq
+    START WITH 1
+    INCREMENT BY 1
+    NO MINVALUE
+    NO MAXVALUE
+    CACHE 1;
+
+
+ALTER TABLE tsl_data.high_quality_count_day_id_seq OWNER TO postgres;
+
+--
+-- Name: high_quality_count_day_id_seq; Type: SEQUENCE OWNED BY; Schema: tsl_data; Owner: postgres
+--
+
+ALTER SEQUENCE tsl_data.high_quality_count_day_id_seq OWNED BY tsl_data.high_quality_count_day.id;
+
+
+--
+-- Name: high_quality_data; Type: TABLE; Schema: tsl_data; Owner: postgres
+--
+
+CREATE TABLE tsl_data.high_quality_data (
+    id bigint NOT NULL,
+    area_id character varying(50),
+    area_name character varying(50),
+    checked_city character varying(50),
+    checked_region character varying(50),
+    checked_region_reason character varying(50),
+    city_id character varying(50),
+    city_name character varying(50),
+    cp_is_ok character varying(50),
+    cp_satisfaction character varying(50),
+    cp_timely_contact character varying(50),
+    create_date timestamp without time zone,
+    day_id character varying(8) NOT NULL,
+    last_update_date timestamp without time zone,
+    no_visit_tag character varying(50),
+    sheet_no character varying(50) NOT NULL,
+    checked_city_reason character varying(50)
+);
+
+
+ALTER TABLE tsl_data.high_quality_data OWNER TO postgres;
+
+--
+-- Name: high_quality_data_id_seq; Type: SEQUENCE; Schema: tsl_data; Owner: postgres
+--
+
+ALTER TABLE tsl_data.high_quality_data ALTER COLUMN id ADD GENERATED BY DEFAULT AS IDENTITY (
+    SEQUENCE NAME tsl_data.high_quality_data_id_seq
+    START WITH 1
+    INCREMENT BY 1
+    NO MINVALUE
+    NO MAXVALUE
+    CACHE 1
+);
+
+
+--
+-- Name: high_quality_list_day; Type: TABLE; Schema: tsl_data; Owner: postgres
+--
+
+CREATE TABLE tsl_data.high_quality_list_day (
+    id bigint NOT NULL,
+    month_id character varying(18),
+    day_id character varying(6),
+    contact_id character varying(150),
+    busino_area_name character varying(90),
+    busino_prov_name character varying(90),
+    sheet_no character varying(90),
+    busi_number character varying(120),
+    cust_star_name character varying(30),
+    sheet_type_name character varying(150),
+    serv_type_name character varying(900),
+    last_deal_content character varying(4136),
+    accept_time character varying(150),
+    archived_time character varying(150),
+    data_type_name character varying(300),
+    channel_name character varying(150),
+    profes_dep character varying(300),
+    big_type_name character varying(300),
+    small_type_name character varying(300),
+    is_dispatch_cloud character varying(30),
+    accept_channel_name character varying(300),
+    duty_reason_name character varying(900),
+    duty_major_name character varying(300),
+    is_online_complete character varying(15),
+    is_call_complete character varying(15),
+    is_cusser_complete character varying(15),
+    is_distr_complete character varying(15),
+    caller_number character varying(90),
+    compl_area_name character varying(60),
+    compl_prov_name character varying(60),
+    submit_channel character varying(300),
+    solved_result_desc character varying(4136),
+    cust_level_name character varying(300),
+    busi_type_name character varying(300),
+    urgent_level_name character varying(300),
+    important_type_name character varying(300),
+    is_upgrade character varying(15),
+    actual_total_len character varying(60),
+    nature_actual_total_len character varying(60),
+    cust_satis_desc character varying(300),
+    auto_is_ok character varying(60),
+    auto_cust_satis_desc character varying(300),
+    nonauto_is_ok_name character varying(15),
+    nonauto_cust_satis_desc character varying(150),
+    prod_type_name character varying(300),
+    proc_name character varying(600),
+    merge_satis_desc character varying(60),
+    serv_type_name_new character varying(1500),
+    is_svip_keyman character varying(6),
+    customer_label character varying(120),
+    prov_name character varying(180),
+    area_id character varying(30),
+    area_name character varying(150),
+    city_id character varying(60),
+    city_name character varying(300),
+    grid_id character varying(60),
+    grid_name character varying(300),
+    is_distri_area character varying(15),
+    cp_satisfaction character varying(60),
+    cp_is_ok character varying(60),
+    cp_timely_contact character varying(60),
+    cp_type character varying(300),
+    novisit_tag character varying(30),
+    serv_content character varying(4136),
+    gis_area character varying(60),
+    gis_area_name character varying(60),
+    gis_city character varying(60),
+    gis_city_name character varying(60),
+    use_gis character varying(3),
+    month_date character varying(18),
+    day_date character varying(6)
+);
+
+
+ALTER TABLE tsl_data.high_quality_list_day OWNER TO postgres;
+
+--
+-- Name: high_quality_list_day_id_seq; Type: SEQUENCE; Schema: tsl_data; Owner: postgres
+--
+
+CREATE SEQUENCE tsl_data.high_quality_list_day_id_seq
+    START WITH 1
+    INCREMENT BY 1
+    NO MINVALUE
+    NO MAXVALUE
+    CACHE 1;
+
+
+ALTER TABLE tsl_data.high_quality_list_day_id_seq OWNER TO postgres;
+
+--
+-- Name: high_quality_list_day_id_seq; Type: SEQUENCE OWNED BY; Schema: tsl_data; Owner: postgres
+--
+
+ALTER SEQUENCE tsl_data.high_quality_list_day_id_seq OWNED BY tsl_data.high_quality_list_day.id;
+
+
+--
+-- Name: mobile_complaint_day; Type: TABLE; Schema: tsl_data; Owner: postgres
+--
+
+CREATE TABLE tsl_data.mobile_complaint_day (
+    id bigint NOT NULL,
+    month_id character varying(6) NOT NULL,
+    day_id character varying(2) NOT NULL,
+    acct_date character varying(300) NOT NULL,
+    sheet_no character varying(150),
+    is_online_complete character varying(300),
+    contact_no character varying(300),
+    busi_no character varying(300),
+    serv_content character varying(4136),
+    last_deal_content character varying(4136),
+    deal_depart_name character varying(300),
+    deal_opinion character varying(4136),
+    serv_type character varying(600),
+    bus_type character varying(300),
+    duty_reason character varying(600),
+    accept_channel character varying(300),
+    submit_channel character varying(300),
+    compl_area_local character varying(300),
+    duty_major character varying(300),
+    product_name character varying(600),
+    sp_product_code character varying(600),
+    pre_repair_name character varying(300),
+    pre_repair_charges character varying(24),
+    fault_location character varying(300),
+    cust_level character varying(300),
+    satisfaction_in_reply character varying(300),
+    is_ok_in_reply character varying(300),
+    accept_time character varying(19),
+    end_time character varying(19),
+    proce_time character varying(19),
+    cust_area character varying(300),
+    is_cust_serv_complete character varying(300),
+    is_send_sheet_complete character varying(300),
+    is_repeat character varying(300),
+    is_upgrade character varying(300),
+    is_timeout character varying(300),
+    gis_city character varying(300),
+    process_nums character varying(24),
+    deal_depart_name_1 character varying(300),
+    deal_depart_name_2 character varying(300),
+    deal_depart_name_3 character varying(300),
+    first_call_back_time character varying(19),
+    proce_remark character varying(4136),
+    duty_major_day character varying(300),
+    duty_reason_id_day character varying(300),
+    duty_major_month character varying(300),
+    duty_reason_id_month character varying(300),
+    voice_text character varying(4136),
+    cteate_time timestamp without time zone DEFAULT now()
+);
+
+
+ALTER TABLE tsl_data.mobile_complaint_day OWNER TO postgres;
+
+--
+-- Name: mobile_complaint_id_seq; Type: SEQUENCE; Schema: tsl_data; Owner: postgres
+--
+
+CREATE SEQUENCE tsl_data.mobile_complaint_id_seq
+    START WITH 1
+    INCREMENT BY 1
+    NO MINVALUE
+    NO MAXVALUE
+    CACHE 1;
+
+
+ALTER TABLE tsl_data.mobile_complaint_id_seq OWNER TO postgres;
+
+--
+-- Name: mobile_complaint_id_seq; Type: SEQUENCE OWNED BY; Schema: tsl_data; Owner: postgres
+--
+
+ALTER SEQUENCE tsl_data.mobile_complaint_id_seq OWNED BY tsl_data.mobile_complaint_day.id;
+
+
+--
+-- Name: sys_data_dictionary; Type: TABLE; Schema: tsl_data; Owner: postgres
+--
+
+CREATE TABLE tsl_data.sys_data_dictionary (
+    id bigint NOT NULL,
+    create_date timestamp without time zone,
+    last_update_date timestamp without time zone,
+    nick_code character varying(50),
+    nick_name character varying(100),
+    real_code character varying(50),
+    real_name character varying(100),
+    type character varying(50),
+    ord integer,
+    parent_id bigint
+);
+
+
+ALTER TABLE tsl_data.sys_data_dictionary OWNER TO postgres;
+
+--
+-- Name: sys_data_dictionary_id_seq1; Type: SEQUENCE; Schema: tsl_data; Owner: postgres
+--
+
+ALTER TABLE tsl_data.sys_data_dictionary ALTER COLUMN id ADD GENERATED BY DEFAULT AS IDENTITY (
+    SEQUENCE NAME tsl_data.sys_data_dictionary_id_seq1
+    START WITH 1
+    INCREMENT BY 1
+    NO MINVALUE
+    NO MAXVALUE
+    CACHE 1
+);
+
+
+--
+-- Name: target_ts_ratio; Type: TABLE; Schema: tsl_data; Owner: postgres
+--
+
+CREATE TABLE tsl_data.target_ts_ratio (
+    id bigint NOT NULL,
+    city_name character varying(50) NOT NULL,
+    create_date timestamp without time zone,
+    customer_target_ratio double precision NOT NULL,
+    last_update_date timestamp without time zone,
+    management_target_ratio double precision NOT NULL,
+    month_id character varying(8) NOT NULL
+);
+
+
+ALTER TABLE tsl_data.target_ts_ratio OWNER TO postgres;
+
+--
+-- Name: target_ts_ratio_id_seq; Type: SEQUENCE; Schema: tsl_data; Owner: postgres
+--
+
+ALTER TABLE tsl_data.target_ts_ratio ALTER COLUMN id ADD GENERATED BY DEFAULT AS IDENTITY (
+    SEQUENCE NAME tsl_data.target_ts_ratio_id_seq
+    START WITH 1
+    INCREMENT BY 1
+    NO MINVALUE
+    NO MAXVALUE
+    CACHE 1
+);
+
+
+--
+-- Name: task_record; Type: TABLE; Schema: tsl_data; Owner: postgres
+--
+
+CREATE TABLE tsl_data.task_record (
+    id bigint NOT NULL,
+    create_date timestamp without time zone,
+    end_status integer,
+    end_time timestamp without time zone,
+    last_update_date timestamp without time zone,
+    start_time timestamp without time zone,
+    task_info text,
+    task_name character varying(255),
+    time_cost bigint
+);
+
+
+ALTER TABLE tsl_data.task_record OWNER TO postgres;
+
+--
+-- Name: task_record_id_seq; Type: SEQUENCE; Schema: tsl_data; Owner: postgres
+--
+
+ALTER TABLE tsl_data.task_record ALTER COLUMN id ADD GENERATED BY DEFAULT AS IDENTITY (
+    SEQUENCE NAME tsl_data.task_record_id_seq
+    START WITH 1
+    INCREMENT BY 1
+    NO MINVALUE
+    NO MAXVALUE
+    CACHE 1
+);
+
+
+--
+-- Name: user_count; Type: TABLE; Schema: tsl_data; Owner: postgres
+--
+
+CREATE TABLE tsl_data.user_count (
+    id bigint NOT NULL,
+    city_name character varying(50) NOT NULL,
+    create_date timestamp without time zone,
+    customer_user_count double precision,
+    last_update_date timestamp without time zone,
+    management_user_count double precision,
+    month_id character varying(8) NOT NULL
+);
+
+
+ALTER TABLE tsl_data.user_count OWNER TO postgres;
+
+--
+-- Name: user_count_id_seq; Type: SEQUENCE; Schema: tsl_data; Owner: postgres
+--
+
+ALTER TABLE tsl_data.user_count ALTER COLUMN id ADD GENERATED BY DEFAULT AS IDENTITY (
+    SEQUENCE NAME tsl_data.user_count_id_seq
+    START WITH 1
+    INCREMENT BY 1
+    NO MINVALUE
+    NO MAXVALUE
+    CACHE 1
+);
+
+
+--
+-- Name: work_flow_basic_data; Type: TABLE; Schema: tsl_data; Owner: postgres
+--
+
+CREATE TABLE tsl_data.work_flow_basic_data (
+    id bigint NOT NULL,
+    city_id character varying(50),
+    create_date timestamp without time zone,
+    kfsn character varying(50) NOT NULL,
+    last_update_date timestamp without time zone,
+    region_id character varying(50),
+    work_flow_create_time timestamp without time zone,
+    work_flow_update_time timestamp without time zone
+);
+
+
+ALTER TABLE tsl_data.work_flow_basic_data OWNER TO postgres;
+
+--
+-- Name: work_flow_basic_data_id_seq; Type: SEQUENCE; Schema: tsl_data; Owner: postgres
+--
+
+ALTER TABLE tsl_data.work_flow_basic_data ALTER COLUMN id ADD GENERATED BY DEFAULT AS IDENTITY (
+    SEQUENCE NAME tsl_data.work_flow_basic_data_id_seq
+    START WITH 1
+    INCREMENT BY 1
+    NO MINVALUE
+    NO MAXVALUE
+    CACHE 1
+);
+
+
+--
+-- Name: work_flow_basic_data_temp_1703433900116; Type: TABLE; Schema: tsl_data; Owner: postgres
+--
+
+CREATE TABLE tsl_data.work_flow_basic_data_temp_1703433900116 (
+    city_id character varying(50),
+    kfsn character varying(50) NOT NULL,
+    region_id character varying(50),
+    work_flow_create_time timestamp without time zone,
+    work_flow_update_time timestamp without time zone
+);
+
+
+ALTER TABLE tsl_data.work_flow_basic_data_temp_1703433900116 OWNER TO postgres;
+
+--
+-- Name: work_flow_basic_data_temp_1703437500106; Type: TABLE; Schema: tsl_data; Owner: postgres
+--
+
+CREATE TABLE tsl_data.work_flow_basic_data_temp_1703437500106 (
+    city_id character varying(50),
+    kfsn character varying(50) NOT NULL,
+    region_id character varying(50),
+    work_flow_create_time timestamp without time zone,
+    work_flow_update_time timestamp without time zone
+);
+
+
+ALTER TABLE tsl_data.work_flow_basic_data_temp_1703437500106 OWNER TO postgres;
+
+--
+-- Name: work_flow_basic_data_temp_1703441100143; Type: TABLE; Schema: tsl_data; Owner: postgres
+--
+
+CREATE TABLE tsl_data.work_flow_basic_data_temp_1703441100143 (
+    city_id character varying(50),
+    kfsn character varying(50) NOT NULL,
+    region_id character varying(50),
+    work_flow_create_time timestamp without time zone,
+    work_flow_update_time timestamp without time zone
+);
+
+
+ALTER TABLE tsl_data.work_flow_basic_data_temp_1703441100143 OWNER TO postgres;
+
+--
+-- Name: work_flow_basic_data_temp_1703444700110; Type: TABLE; Schema: tsl_data; Owner: postgres
+--
+
+CREATE TABLE tsl_data.work_flow_basic_data_temp_1703444700110 (
+    city_id character varying(50),
+    kfsn character varying(50) NOT NULL,
+    region_id character varying(50),
+    work_flow_create_time timestamp without time zone,
+    work_flow_update_time timestamp without time zone
+);
+
+
+ALTER TABLE tsl_data.work_flow_basic_data_temp_1703444700110 OWNER TO postgres;
+
+--
+-- Name: work_flow_basic_data_temp_1703448300101; Type: TABLE; Schema: tsl_data; Owner: postgres
+--
+
+CREATE TABLE tsl_data.work_flow_basic_data_temp_1703448300101 (
+    city_id character varying(50),
+    kfsn character varying(50) NOT NULL,
+    region_id character varying(50),
+    work_flow_create_time timestamp without time zone,
+    work_flow_update_time timestamp without time zone
+);
+
+
+ALTER TABLE tsl_data.work_flow_basic_data_temp_1703448300101 OWNER TO postgres;
+
+--
+-- Name: work_flow_basic_data_temp_1703451900113; Type: TABLE; Schema: tsl_data; Owner: postgres
+--
+
+CREATE TABLE tsl_data.work_flow_basic_data_temp_1703451900113 (
+    city_id character varying(50),
+    kfsn character varying(50) NOT NULL,
+    region_id character varying(50),
+    work_flow_create_time timestamp without time zone,
+    work_flow_update_time timestamp without time zone
+);
+
+
+ALTER TABLE tsl_data.work_flow_basic_data_temp_1703451900113 OWNER TO postgres;
+
+--
+-- Name: work_flow_basic_data_temp_1703455500188; Type: TABLE; Schema: tsl_data; Owner: postgres
+--
+
+CREATE TABLE tsl_data.work_flow_basic_data_temp_1703455500188 (
+    city_id character varying(50),
+    kfsn character varying(50) NOT NULL,
+    region_id character varying(50),
+    work_flow_create_time timestamp without time zone,
+    work_flow_update_time timestamp without time zone
+);
+
+
+ALTER TABLE tsl_data.work_flow_basic_data_temp_1703455500188 OWNER TO postgres;
+
+--
+-- Name: work_flow_basic_data_temp_1703459100110; Type: TABLE; Schema: tsl_data; Owner: postgres
+--
+
+CREATE TABLE tsl_data.work_flow_basic_data_temp_1703459100110 (
+    city_id character varying(50),
+    kfsn character varying(50) NOT NULL,
+    region_id character varying(50),
+    work_flow_create_time timestamp without time zone,
+    work_flow_update_time timestamp without time zone
+);
+
+
+ALTER TABLE tsl_data.work_flow_basic_data_temp_1703459100110 OWNER TO postgres;
+
+--
+-- Name: work_flow_basic_data_temp_1703462700104; Type: TABLE; Schema: tsl_data; Owner: postgres
+--
+
+CREATE TABLE tsl_data.work_flow_basic_data_temp_1703462700104 (
+    city_id character varying(50),
+    kfsn character varying(50) NOT NULL,
+    region_id character varying(50),
+    work_flow_create_time timestamp without time zone,
+    work_flow_update_time timestamp without time zone
+);
+
+
+ALTER TABLE tsl_data.work_flow_basic_data_temp_1703462700104 OWNER TO postgres;
+
+--
+-- Name: work_flow_basic_data_temp_1703466300104; Type: TABLE; Schema: tsl_data; Owner: postgres
+--
+
+CREATE TABLE tsl_data.work_flow_basic_data_temp_1703466300104 (
+    city_id character varying(50),
+    kfsn character varying(50) NOT NULL,
+    region_id character varying(50),
+    work_flow_create_time timestamp without time zone,
+    work_flow_update_time timestamp without time zone
+);
+
+
+ALTER TABLE tsl_data.work_flow_basic_data_temp_1703466300104 OWNER TO postgres;
+
+--
+-- Name: work_flow_basic_data_temp_1703469900114; Type: TABLE; Schema: tsl_data; Owner: postgres
+--
+
+CREATE TABLE tsl_data.work_flow_basic_data_temp_1703469900114 (
+    city_id character varying(50),
+    kfsn character varying(50) NOT NULL,
+    region_id character varying(50),
+    work_flow_create_time timestamp without time zone,
+    work_flow_update_time timestamp without time zone
+);
+
+
+ALTER TABLE tsl_data.work_flow_basic_data_temp_1703469900114 OWNER TO postgres;
+
+--
+-- Name: work_flow_basic_data_temp_1703473500109; Type: TABLE; Schema: tsl_data; Owner: postgres
+--
+
+CREATE TABLE tsl_data.work_flow_basic_data_temp_1703473500109 (
+    city_id character varying(50),
+    kfsn character varying(50) NOT NULL,
+    region_id character varying(50),
+    work_flow_create_time timestamp without time zone,
+    work_flow_update_time timestamp without time zone
+);
+
+
+ALTER TABLE tsl_data.work_flow_basic_data_temp_1703473500109 OWNER TO postgres;
+
+--
+-- Name: work_flow_basic_data_temp_1703477100107; Type: TABLE; Schema: tsl_data; Owner: postgres
+--
+
+CREATE TABLE tsl_data.work_flow_basic_data_temp_1703477100107 (
+    city_id character varying(50),
+    kfsn character varying(50) NOT NULL,
+    region_id character varying(50),
+    work_flow_create_time timestamp without time zone,
+    work_flow_update_time timestamp without time zone
+);
+
+
+ALTER TABLE tsl_data.work_flow_basic_data_temp_1703477100107 OWNER TO postgres;
+
+--
+-- Name: work_flow_basic_data_temp_1703480700107; Type: TABLE; Schema: tsl_data; Owner: postgres
+--
+
+CREATE TABLE tsl_data.work_flow_basic_data_temp_1703480700107 (
+    city_id character varying(50),
+    kfsn character varying(50) NOT NULL,
+    region_id character varying(50),
+    work_flow_create_time timestamp without time zone,
+    work_flow_update_time timestamp without time zone
+);
+
+
+ALTER TABLE tsl_data.work_flow_basic_data_temp_1703480700107 OWNER TO postgres;
+
+--
+-- Name: work_flow_basic_data_temp_1703484300146; Type: TABLE; Schema: tsl_data; Owner: postgres
+--
+
+CREATE TABLE tsl_data.work_flow_basic_data_temp_1703484300146 (
+    city_id character varying(50),
+    kfsn character varying(50) NOT NULL,
+    region_id character varying(50),
+    work_flow_create_time timestamp without time zone,
+    work_flow_update_time timestamp without time zone
+);
+
+
+ALTER TABLE tsl_data.work_flow_basic_data_temp_1703484300146 OWNER TO postgres;
+
+--
+-- Name: work_flow_basic_data_temp_1703487900106; Type: TABLE; Schema: tsl_data; Owner: postgres
+--
+
+CREATE TABLE tsl_data.work_flow_basic_data_temp_1703487900106 (
+    city_id character varying(50),
+    kfsn character varying(50) NOT NULL,
+    region_id character varying(50),
+    work_flow_create_time timestamp without time zone,
+    work_flow_update_time timestamp without time zone
+);
+
+
+ALTER TABLE tsl_data.work_flow_basic_data_temp_1703487900106 OWNER TO postgres;
+
+--
+-- Name: avg_duration id; Type: DEFAULT; Schema: tsl_data; Owner: postgres
+--
+
+ALTER TABLE ONLY tsl_data.avg_duration ALTER COLUMN id SET DEFAULT nextval('tsl_data.avg_duration_id_seq'::regclass);
+
+
+--
+-- Name: high_quality_count_day id; Type: DEFAULT; Schema: tsl_data; Owner: postgres
+--
+
+ALTER TABLE ONLY tsl_data.high_quality_count_day ALTER COLUMN id SET DEFAULT nextval('tsl_data.high_quality_count_day_id_seq'::regclass);
+
+
+--
+-- Name: high_quality_list_day id; Type: DEFAULT; Schema: tsl_data; Owner: postgres
+--
+
+ALTER TABLE ONLY tsl_data.high_quality_list_day ALTER COLUMN id SET DEFAULT nextval('tsl_data.high_quality_list_day_id_seq'::regclass);
+
+
+--
+-- Name: mobile_complaint_day id; Type: DEFAULT; Schema: tsl_data; Owner: postgres
+--
+
+ALTER TABLE ONLY tsl_data.mobile_complaint_day ALTER COLUMN id SET DEFAULT nextval('tsl_data.mobile_complaint_id_seq'::regclass);
+
+
+--
+-- Name: cron_task_record cron_task_record_pkey; Type: CONSTRAINT; Schema: tsl_data; Owner: postgres
+--
+
+ALTER TABLE ONLY tsl_data.cron_task_record
+    ADD CONSTRAINT cron_task_record_pkey PRIMARY KEY (id);
+
+
+--
+-- Name: high_quality_count_day high_quality_count_day_pkey; Type: CONSTRAINT; Schema: tsl_data; Owner: postgres
+--
+
+ALTER TABLE ONLY tsl_data.high_quality_count_day
+    ADD CONSTRAINT high_quality_count_day_pkey PRIMARY KEY (id);
+
+
+--
+-- Name: high_quality_data high_quality_data_pkey; Type: CONSTRAINT; Schema: tsl_data; Owner: postgres
+--
+
+ALTER TABLE ONLY tsl_data.high_quality_data
+    ADD CONSTRAINT high_quality_data_pkey PRIMARY KEY (id);
+
+
+--
+-- Name: high_quality_list_day high_quality_list_day_pkey; Type: CONSTRAINT; Schema: tsl_data; Owner: postgres
+--
+
+ALTER TABLE ONLY tsl_data.high_quality_list_day
+    ADD CONSTRAINT high_quality_list_day_pkey PRIMARY KEY (id);
+
+
+--
+-- Name: mobile_complaint_day mobile_complaint_pkey; Type: CONSTRAINT; Schema: tsl_data; Owner: postgres
+--
+
+ALTER TABLE ONLY tsl_data.mobile_complaint_day
+    ADD CONSTRAINT mobile_complaint_pkey PRIMARY KEY (id);
+
+
+--
+-- Name: user_count month_city_unique; Type: CONSTRAINT; Schema: tsl_data; Owner: postgres
+--
+
+ALTER TABLE ONLY tsl_data.user_count
+    ADD CONSTRAINT month_city_unique UNIQUE (month_id, city_name);
+
+
+--
+-- Name: sys_data_dictionary sys_data_dictionary_pkey1; Type: CONSTRAINT; Schema: tsl_data; Owner: postgres
+--
+
+ALTER TABLE ONLY tsl_data.sys_data_dictionary
+    ADD CONSTRAINT sys_data_dictionary_pkey1 PRIMARY KEY (id);
+
+
+--
+-- Name: target_ts_ratio target_ts_ratio_pkey; Type: CONSTRAINT; Schema: tsl_data; Owner: postgres
+--
+
+ALTER TABLE ONLY tsl_data.target_ts_ratio
+    ADD CONSTRAINT target_ts_ratio_pkey PRIMARY KEY (id);
+
+
+--
+-- Name: task_record task_record_pkey; Type: CONSTRAINT; Schema: tsl_data; Owner: postgres
+--
+
+ALTER TABLE ONLY tsl_data.task_record
+    ADD CONSTRAINT task_record_pkey PRIMARY KEY (id);
+
+
+--
+-- Name: user_count user_count_pkey; Type: CONSTRAINT; Schema: tsl_data; Owner: postgres
+--
+
+ALTER TABLE ONLY tsl_data.user_count
+    ADD CONSTRAINT user_count_pkey PRIMARY KEY (id);
+
+
+--
+-- Name: work_flow_basic_data work_flow_basic_data_pkey; Type: CONSTRAINT; Schema: tsl_data; Owner: postgres
+--
+
+ALTER TABLE ONLY tsl_data.work_flow_basic_data
+    ADD CONSTRAINT work_flow_basic_data_pkey PRIMARY KEY (id);
+
+
+--
+-- Name: sys_data_dictionary fk77q91mf3guu88esemsn77afkd; Type: FK CONSTRAINT; Schema: tsl_data; Owner: postgres
+--
+
+ALTER TABLE ONLY tsl_data.sys_data_dictionary
+    ADD CONSTRAINT fk77q91mf3guu88esemsn77afkd FOREIGN KEY (parent_id) REFERENCES tsl_data.sys_data_dictionary(id);
+
+
+--
+-- PostgreSQL database dump complete
+--
+

+ 131 - 0
doc/数据订阅/能力商店/河北客户体验管理智能定责投诉明细月累计接口日/complaint_details_fix_ywd_day表相关语句.md

@@ -0,0 +1,131 @@
+# complaint_details_fix_ywd_day
+
+## Insert statement
+
+```sql
+INSERT INTO tsl_data.complaint_details_fix_ywd_day
+(id, month_id, day_id, month_id1, day_id1, sheet_no, is_online_complete, contact_no, busi_no, serv_content, last_deal_content, deal_depart_name, deal_user, deal_opinion, complete_user_code, serv_type, bus_type, duty_reason, problem_duty, problem_solving_con, code_urgent_level, code_important_type, cust_name, accept_user, cust_level, accept_channel, submit_channel, compl_area_local, compl_city_local, compl_grid_local, cust_province, accept_time, end_time, cust_area, duty_major, is_call_center_complete, is_cust_serv_complete, is_send_sheet_complete, sp_code, sp_name, is_repeat, is_timeout, duty_dept, proce_user_code, proce_time, proce_depart_name, proce_remark, satisfaction, is_ok, is_ok_in_reply, product_name, pre_repair_name, pre_repair_charges, actual_total_len, timeout_len, timeout_depart_name, sp_product_code, sp_product_name, process_nums, deal_depart_name_1, deal_user_1, sended_user_1, deal_depart_name_2, deal_user_2, sended_user_2, deal_depart_name_3, deal_user_3, sended_user_3, deal_depart_name_end, deal_user_end, sended_depart_name_last, first_call_back_time, duty_reason_id_day, duty_major_day, duty_reason_id_month, duty_major_month, voice_text, fault_location, satisfaction_in_reply, is_upgrade, gis_city, data_source, code_answer_way, success_call_back, last_satisfaction, last_is_ok, design_profess, deal_unit, province_deal_unit, if_direct, is_proself, timely_contact, is_repeat_5times_m, is_repeat_2times_m, is_repeat_2times_2m, is_repeat_2times_3m, create_time)
+VALUES(0, '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', now());
+```
+
+## Table creation statements
+
+```sql
+CREATE TABLE tsl_data.complaint_details_fix_ywd_day (
+  id bigint NOT NULL,
+  month_id character varying(6),
+  day_id character varying(6),
+  month_id1 character varying(18),
+  day_id1 character varying(6),
+  sheet_no character varying(150),
+  is_online_complete character varying(6),
+  contact_no character varying(150),
+  busi_no character varying(150),
+  serv_content character varying(4136),
+  last_deal_content character varying(4136),
+  deal_depart_name character varying(300),
+  deal_user character varying(60),
+  deal_opinion character varying(4136),
+  complete_user_code character varying(150),
+  serv_type character varying(600),
+  bus_type character varying(30),
+  duty_reason character varying(600),
+  problem_duty character varying(300),
+  problem_solving_con character varying(300),
+  code_urgent_level character varying(300),
+  code_important_type character varying(300),
+  cust_name character varying(300),
+  accept_user character varying(600),
+  cust_level character varying(300),
+  accept_channel character varying(300),
+  submit_channel character varying(300),
+  compl_area_local character varying(300),
+  compl_city_local character varying(90),
+  compl_grid_local character varying(210),
+  cust_province character varying(300),
+  accept_time character varying(300),
+  end_time character varying(300),
+  cust_area character varying(300),
+  duty_major character varying(300),
+  is_call_center_complete character varying(6),
+  is_cust_serv_complete character varying(6),
+  is_send_sheet_complete character varying(6),
+  sp_code character varying(600),
+  sp_name character varying(600),
+  is_repeat character varying(50),
+  is_timeout character varying(50),
+  duty_dept character varying(300),
+  proce_user_code character varying(60),
+  proce_time character varying(300),
+  proce_depart_name character varying(1500),
+  proce_remark character varying(4136),
+  satisfaction character varying(50),
+  is_ok character varying(50),
+  is_ok_in_reply character varying(300),
+  product_name character varying(600),
+  pre_repair_name character varying(60),
+  pre_repair_charges character varying(60),
+  actual_total_len character varying(60),
+  timeout_len character varying(60),
+  timeout_depart_name character varying(300),
+  sp_product_code character varying(600),
+  sp_product_name character varying(600),
+  process_nums character varying(60),
+  deal_depart_name_1 character varying(300),
+  deal_user_1 character varying(150),
+  sended_user_1 character varying(150),
+  deal_depart_name_2 character varying(300),
+  deal_user_2 character varying(150),
+  sended_user_2 character varying(150),
+  deal_depart_name_3 character varying(300),
+  deal_user_3 character varying(150),
+  sended_user_3 character varying(150),
+  deal_depart_name_end character varying(300),
+  deal_user_end character varying(60),
+  sended_depart_name_last character varying(300),
+  first_call_back_time character varying(60),
+  duty_reason_id_day character varying(300),
+  duty_major_day character varying(50),
+  duty_reason_id_month character varying(300),
+  duty_major_month character varying(50),
+  voice_text character varying(4136),
+  fault_location character varying(500),
+  satisfaction_in_reply character varying(300),
+  is_upgrade character varying(6),
+  gis_city character varying(210),
+  data_source character varying(60),
+  code_answer_way character varying(15),
+  success_call_back character varying(50),
+  last_satisfaction character varying(50),
+  last_is_ok character varying(50),
+  design_profess character varying(300),
+  deal_unit character varying(150),
+  province_deal_unit character varying(150),
+  if_direct character varying(50),
+  is_proself character varying(50),
+  timely_contact character varying(50),
+  is_repeat_5times_m character varying(50),
+  is_repeat_2times_m character varying(50),
+  is_repeat_2times_2m character varying(50),
+  is_repeat_2times_3m character varying(50),
+  create_time timestamp without time zone DEFAULT now()
+);
+
+-- create the sequence
+CREATE SEQUENCE tsl_data.complaint_details_fix_ywd_day_id_seq
+    START WITH 1
+    INCREMENT BY 1
+    NO MINVALUE
+    NO MAXVALUE
+    CACHE 1;
+
+-- attach the sequence to the id column
+ALTER SEQUENCE tsl_data.complaint_details_fix_ywd_day_id_seq OWNED BY tsl_data.complaint_details_fix_ywd_day.id;
+
+-- default value for id
+ALTER TABLE ONLY tsl_data.complaint_details_fix_ywd_day ALTER COLUMN id SET DEFAULT nextval('tsl_data.complaint_details_fix_ywd_day_id_seq'::regclass);
+
+-- make id the primary key
+ALTER TABLE ONLY tsl_data.complaint_details_fix_ywd_day
+    ADD CONSTRAINT complaint_details_fix_ywd_day_pkey PRIMARY KEY (id);
+```
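A quick way to sanity-check each day's load of this table is to count the inserted rows per accounting day and compare the total against the line count of the subscribed source file. The query below is illustrative only and is not part of the committed file; it uses just the month_id/day_id columns defined above:

```sql
-- Illustrative only: rows loaded per accounting day in complaint_details_fix_ywd_day
SELECT month_id, day_id, count(*) AS loaded_rows
FROM tsl_data.complaint_details_fix_ywd_day
GROUP BY month_id, day_id
ORDER BY month_id, day_id;
```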

Binary
doc/数据订阅/能力商店/河北客户体验管理智能定责投诉明细月累计接口日/河北客户体验管理智能定责投诉明细月累计接口日.xlsx


Binary
doc/立项材料/钉钉群报表自动化2024年适配需求-20231225/[河北联通]关于投诉工单日报表2024年适配的需求.docx


+ 34 - 0
doc/部署环境/用户更新.md

@@ -0,0 +1,34 @@
+# User count update
+
+```java
+@Autowired
+private UserCountService userCountService;
+
+/**
+ * Update management-side user counts -- at the current stage the mode still needs a small adjustment
+ */
+@Test
+void test1() throws IOException {
+    String path = "D:/src/管理端用户数.txt";
+    Files.lines(Paths.get(path), StandardCharsets.UTF_8)
+            .forEach(line -> {
+                String[] split = line.split("\t");
+                System.out.println(split[0] + Double.parseDouble(split[2]));
+                userCountService.updateManagementUserCount("202311", split[0], Double.parseDouble(split[2]));
+            });
+}
+
+/**
+ * Update customer-side user counts
+ */
+@Test
+void test2() throws IOException {
+    String path = "D:/src/客户端用户数.txt";
+    Files.lines(Paths.get(path), StandardCharsets.UTF_8)
+            .forEach(line -> {
+                String[] split = line.split("\t");
+                System.out.println(split[0] + Double.parseDouble(split[1]));
+                userCountService.updateCustomerUserCount("202311", split[0], Double.parseDouble(split[1]));
+            });
+}
+```
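The tests above push per-city counts through UserCountService, whose implementation is not part of this commit. Since the user_count table in the SQL backup earlier in this commit carries the month_city_unique constraint on (month_id, city_name), the same update could in principle be expressed as a single upsert. The sketch below is illustrative only, with a hypothetical city name and counts; the actual service may work differently:

```sql
-- Sketch only: upsert one city's counts for a month, relying on the
-- month_city_unique UNIQUE (month_id, city_name) constraint from the backup above.
INSERT INTO tsl_data.user_count
    (city_name, month_id, management_user_count, customer_user_count, create_date, last_update_date)
VALUES ('石家庄', '202311', 100000, 200000, now(), now())  -- hypothetical city and counts
ON CONFLICT (month_id, city_name)
DO UPDATE SET management_user_count = EXCLUDED.management_user_count,
              customer_user_count   = EXCLUDED.customer_user_count,
              last_update_date      = now();
```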

+ 0 - 39
doc/需求文档/20231219-新需求.md

@@ -1,39 +0,0 @@
-# New requirement log
-
-## Date raised
-
-December 19, 2023
-
-## Summary
-
-The assessment criteria change from 2024 onward, so the daily report needs to be adjusted to the new requirements.
-
-## Details
-
-### Management side - mobile network perception adjustments
-
-1. Management side - mobile network perception category: extend the existing data source (target values to be provided) and verify the data source issue.
-Daily responsibility-assignment problem classification:
-业务使用>>基础业务使用>>移网主被叫>>手机无法主被叫
-
-4. Switch the repeat-complaint data source to the mobile network perception category.
-
-> As of December 20, 2023 the data source still has problems, so development cannot start.
-
-### Drop sheet2
-
-2. Drop the customer-side sheet2,
-
-### New: service request reduction
-
-3. Add service request reduction: the data source is the mobile network experience details, calculated the same way as the mobile network perception category (target values to be provided).
-
-> Requires subscribing to new data -- the service providing it has not yet been made available.
-
-### Complaint handling time and overdue ticket overview adjustments
-
-5. Complaint handling time and overdue ticket overview: switch the data source to tickets that entered TOP (excluding returned tickets) and compute each city's average handling time (archive time minus acceptance time); if the archive time is missing (……), () target value: 36 hours.
-
-### Support city-level daily reports
-
-7. Support a feature for cities to send their own daily reports, so that after a city updates its districts,

Binary
doc/需求文档/2024年适配/2024年适配-工作量.xlsx


+ 55 - 0
doc/需求文档/2024年适配/关于投诉工单日报表2024年适配的需求.md

@@ -0,0 +1,55 @@
+# Requirements for the 2024 adaptation of the complaint work order daily report
+
+December 19, 2023
+
+## 1. Requirement name
+
+## 2. Scope
+
+Hebei provincial branch
+
+## 3. Background
+
+The assessment criteria change from 2024 onward, so the daily report needs to be adjusted to the new requirements.
+
+## 4. Terms and definitions
+
+None
+
+## 5. Current application support
+
+## 6. Expected goals
+
+## 7. Requirement description
+
+### 7.1 Management side - mobile network perception adjustments
+
+Management side - mobile network perception category: extend the existing data source (target values to be provided) and verify the data source issue.
+
+河北客户体验管理智能定责投诉明细月累计接口日
+
+Daily responsibility-assignment problem classification (duty_reason_id_month): 业务使用>>基础业务使用>>移网主被叫>>手机无法主被叫
+
+Switch the repeat-complaint data source to the mobile network perception category.
+
+### 7.2 Drop the customer-side sheet2 assessment
+
+### 7.3 New: service request reduction
+
+Add service request reduction: the data source is the mobile network experience details, calculated the same way as the mobile network perception category (target values to be provided).
+
+> Requires subscribing to new data -- the service providing it has not yet been made available.
+
+### 7.4 Complaint handling time and overdue ticket overview adjustments
+
+Complaint handling time and overdue ticket overview: switch the data source to tickets that entered TOP (excluding returned tickets) and compute each city's average handling time (archive time minus acceptance time); if the archive time is missing (……), () target value: 36 hours.
+
+### 7.5 Support city-level daily reports
+
+Support a feature for cities to send their own daily reports, so that after a city updates its districts,
+
+## 8. Recommended business route
+
+## 9. Timeline
+
+Expected go-live date: January 20, 2024
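Section 7.4 redefines the handling-time metric as archive time minus acceptance time per city, against a 36-hour target. As a rough illustration only (the TOP-ticket source and the rule for missing archive times are left open above), the per-city average could be computed from the high_quality_list_day table in this commit's SQL backup, assuming its accept_time and archived_time strings parse as timestamps:

```sql
-- Sketch only: average handling time in hours per city (archive time minus accept time).
-- Assumes accept_time/archived_time parse as timestamps; the TOP-ticket filter and the
-- handling of missing archive times from section 7.4 are intentionally left out here.
SELECT busino_area_name AS city_name,
       round(avg(EXTRACT(EPOCH FROM (archived_time::timestamp - accept_time::timestamp)) / 3600.0)::numeric, 2) AS avg_hours
FROM tsl_data.high_quality_list_day
WHERE archived_time IS NOT NULL AND archived_time <> ''
GROUP BY busino_area_name
ORDER BY avg_hours DESC;  -- compare each city's average against the 36-hour target
```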

+ 3 - 2
src/main/java/com/nokia/tsl_data/config/RequestLogConfig.java

@@ -1,7 +1,5 @@
 package com.nokia.tsl_data.config;
 
-import com.nokia.common.http.logging.RequestLogDispatcherServlet;
-import com.nokia.common.http.logging.RequestLogHandlerInterceptor;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.boot.autoconfigure.web.servlet.DispatcherServletAutoConfiguration;
 import org.springframework.context.annotation.Bean;
@@ -10,6 +8,9 @@ import org.springframework.web.servlet.DispatcherServlet;
 import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
 import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
 
+import com.nokia.tsl_data.util.logging.RequestLogDispatcherServlet;
+import com.nokia.tsl_data.util.logging.RequestLogHandlerInterceptor;
+
 @Configuration
 public class RequestLogConfig implements WebMvcConfigurer {
 

+ 1 - 1
src/main/java/com/nokia/tsl_data/controller/DataWarehouseController.java

@@ -1,6 +1,6 @@
 package com.nokia.tsl_data.controller;
 
-import com.nokia.common.http.vo.R;
+import com.nokia.tsl_data.entity.vo.R;
 import com.nokia.tsl_data.service.DataWarehouseService;
 import com.nokia.tsl_data.service.TaskService;
 import lombok.extern.slf4j.Slf4j;

+ 1 - 1
src/main/java/com/nokia/tsl_data/controller/ReportGenerateController.java

@@ -1,6 +1,6 @@
 package com.nokia.tsl_data.controller;
 
-import com.nokia.common.http.vo.R;
+import com.nokia.tsl_data.entity.vo.R;
 import com.nokia.tsl_data.service.HighQualityDataService;
 import com.nokia.tsl_data.service.TaskService;
 import lombok.extern.slf4j.Slf4j;

+ 83 - 35
src/main/java/com/nokia/tsl_data/dao/TslDataDao.java

@@ -1,6 +1,7 @@
 package com.nokia.tsl_data.dao;
 
 import com.nokia.tsl_data.entity.WorkFlowBasicData;
+
 import org.springframework.jdbc.core.JdbcTemplate;
 import org.springframework.jdbc.object.BatchSqlUpdate;
 import org.springframework.stereotype.Component;
@@ -29,9 +30,10 @@ public class TslDataDao {
         String sqlFormat = "INSERT INTO tsl_data.%s\n" +
                 "(city_id, kfsn, region_id, work_flow_create_time, work_flow_update_time)\n" +
                 "VALUES(?,?,?,?,?)";
-        BatchSqlUpdate batchSqlUpdate = new BatchSqlUpdate(Objects.requireNonNull(jdbcTemplate.getDataSource()), String.format(sqlFormat, tableName));
+        BatchSqlUpdate batchSqlUpdate = new BatchSqlUpdate(Objects.requireNonNull(jdbcTemplate.getDataSource()),
+                String.format(sqlFormat, tableName));
         batchSqlUpdate.setBatchSize(1000);
-        batchSqlUpdate.setTypes(new int[]{
+        batchSqlUpdate.setTypes(new int[] {
                 Types.VARCHAR, Types.VARCHAR, Types.VARCHAR, Types.TIMESTAMP, Types.TIMESTAMP
         });
         for (WorkFlowBasicData item : data) {
@@ -40,8 +42,7 @@ public class TslDataDao {
                     item.getKfsn(),
                     item.getRegionId(),
                     item.getWorkFlowCreateTime() == null ? null : Timestamp.from(item.getWorkFlowCreateTime()),
-                    item.getWorkFlowUpdateTime() == null ? null : Timestamp.from(item.getWorkFlowUpdateTime())
-            );
+                    item.getWorkFlowUpdateTime() == null ? null : Timestamp.from(item.getWorkFlowUpdateTime()));
         }
         batchSqlUpdate.flush();
     }
@@ -89,13 +90,15 @@ public class TslDataDao {
      * 从临时表中查找新增内容并插入work_flow_basic_data表
      */
     public int insertWorkFlowBasicDataFromTempTable(String tempTableName) {
-        String sqlFormat = "insert into tsl_data.work_flow_basic_data \n" +
-                "(city_id, kfsn, region_id, work_flow_create_time, work_flow_update_time, create_date, last_update_date) \n" +
-                "select t.city_id, t.kfsn, t.region_id, t.work_flow_create_time, t.work_flow_update_time, now(), now() \n" +
-                "from tsl_data.%s t \n" +
-                "left join tsl_data.work_flow_basic_data b \n" +
-                "on t.kfsn = b.kfsn \n" +
-                "where b.kfsn is null";
+        String sqlFormat = "insert into tsl_data.work_flow_basic_data" +
+                " (city_id, kfsn, region_id, work_flow_create_time, work_flow_update_time," +
+                " create_date, last_update_date)" +
+                " select t.city_id, t.kfsn, t.region_id, t.work_flow_create_time," +
+                " t.work_flow_update_time, now(), now()" +
+                " from tsl_data.%s t " +
+                " left join tsl_data.work_flow_basic_data b" +
+                " on t.kfsn = b.kfsn" +
+                " where b.kfsn is null";
         return jdbcTemplate.update(String.format(sqlFormat, tempTableName));
     }
 
@@ -104,49 +107,94 @@ public class TslDataDao {
      */
     public int updateWorkFlowBasicDataFromTempTable(String tempTableName) {
         String sqlFormat = "update tsl_data.work_flow_basic_data b\n" +
-                "set city_id = t.city_id, region_id = t.region_id, work_flow_update_time = t.work_flow_update_time, last_update_date = now()\n" +
-                "from (select city_id, kfsn, region_id, work_flow_create_time, work_flow_update_time from tsl_data.%s) t\n" +
-                "where b.kfsn = t.kfsn and (b.city_id != t.city_id or b.region_id != t.region_id)";
+                "set city_id = t.city_id, region_id = t.region_id, work_flow_update_time = t.work_flow_update_time," +
+                " last_update_date = now()" +
+                " from (select city_id, kfsn, region_id, work_flow_create_time, work_flow_update_time" +
+                " from tsl_data.%s) t" +
+                " where b.kfsn = t.kfsn and (b.city_id != t.city_id or b.region_id != t.region_id)";
         return jdbcTemplate.update(String.format(sqlFormat, tempTableName));
     }
 
     /**
-     * high_quality_list_day 高质量明细数据入库
+     * high_quality_list_day 河北_CEM高品质2日明细
      */
     public void batchInsertHighQualityListDay(List<Object[]> data) {
         String sql = "INSERT INTO tsl_data.high_quality_list_day\n" +
-                "(month_id, day_id, contact_id, busino_area_name, busino_prov_name, sheet_no, busi_number, cust_star_name, sheet_type_name, serv_type_name, last_deal_content," +
-                " accept_time, archived_time, data_type_name, channel_name, profes_dep, big_type_name, small_type_name, is_dispatch_cloud, accept_channel_name, duty_reason_name," +
-                " duty_major_name, is_online_complete, is_call_complete, is_cusser_complete, is_distr_complete, caller_number, compl_area_name, compl_prov_name, submit_channel," +
-                " solved_result_desc, cust_level_name, busi_type_name, urgent_level_name, important_type_name, is_upgrade, actual_total_len, nature_actual_total_len," +
-                " cust_satis_desc, auto_is_ok, auto_cust_satis_desc, nonauto_is_ok_name, nonauto_cust_satis_desc, prod_type_name, proc_name, merge_satis_desc, serv_type_name_new," +
-                " is_svip_keyman, customer_label, prov_name, area_id, area_name, city_id, city_name, grid_id, grid_name, is_distri_area, cp_satisfaction, cp_is_ok," +
-                " cp_timely_contact, cp_type, novisit_tag, serv_content, gis_area, gis_area_name, gis_city, gis_city_name, use_gis, month_date, day_date)" +
-                "VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?," +
-                " ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
+                "(month_id, day_id, contact_id, busino_area_name, busino_prov_name, sheet_no, busi_number," +
+                " cust_star_name, sheet_type_name, serv_type_name, last_deal_content, accept_time, archived_time," +
+                " data_type_name, channel_name, profes_dep, big_type_name, small_type_name, is_dispatch_cloud," +
+                " accept_channel_name, duty_reason_name, duty_major_name, is_online_complete, is_call_complete," +
+                " is_cusser_complete, is_distr_complete, caller_number, compl_area_name, compl_prov_name," +
+                " submit_channel, solved_result_desc, cust_level_name, busi_type_name, urgent_level_name," +
+                " important_type_name, is_upgrade, actual_total_len, nature_actual_total_len, cust_satis_desc," +
+                " auto_is_ok, auto_cust_satis_desc, nonauto_is_ok_name, nonauto_cust_satis_desc, prod_type_name," +
+                " proc_name, merge_satis_desc, serv_type_name_new, is_svip_keyman, customer_label, prov_name," +
+                " area_id, area_name, city_id, city_name, grid_id, grid_name, is_distri_area, cp_satisfaction," +
+                " cp_is_ok, cp_timely_contact, cp_type, novisit_tag, serv_content, gis_area, gis_area_name," +
+                " gis_city, gis_city_name, use_gis, month_date, day_date)" +
+                " VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?," +
+                " ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?," +
+                " ?, ?, ?, ?, ?, ?, ?, ?, ?)";
         jdbcTemplate.batchUpdate(sql, data);
     }
 
     /**
-     * high_quality_count_day 高质量统计数据入库
+     * high_quality_count_day 河北_CEM高品质2日统计 数据入库
      */
     public void batchInsertHighQualityCountDay(List<Object[]> data) {
-        String sql = "insert into tsl_data.high_quality_count_day (month_id,day_id,acct_date,businoareaname,profes_dep,big_type_name,small_type_name,total_complaints," +
-                "hotline_complaints,other_complaint,litigation_volume,satisfaction_rate,satisfaction_count,total_evaluation,complaint_satisfied,complaint_satisfied_list," +
-                "complaint_satisfied_count,complaint_resolution,complaint_resolution_list,complaint_resolution_count,complaint_response,complaint_response_list," +
-                "complaint_response_count,complaint,fault_satisfaction_rate,fault_satisfaction_list,fault_satisfaction_count,fault_resolution_rate,fault_resolution_list," +
-                "fault_resolution_count,fault_response_rate,fault_response_list,fault_response_count)\n" +
+        String sql = "insert into tsl_data.high_quality_count_day (month_id,day_id,acct_date,businoareaname," +
+                " profes_dep,big_type_name,small_type_name,total_complaints,hotline_complaints,other_complaint," +
+                " litigation_volume,satisfaction_rate,satisfaction_count,total_evaluation,complaint_satisfied," +
+                " complaint_satisfied_list,complaint_satisfied_count,complaint_resolution,complaint_resolution_list," +
+                " complaint_resolution_count,complaint_response,complaint_response_list,complaint_response_count," +
+                " complaint,fault_satisfaction_rate,fault_satisfaction_list,fault_satisfaction_count," +
+                " fault_resolution_rate,fault_resolution_list,fault_resolution_count,fault_response_rate," +
+                " fault_response_list,fault_response_count)" +
                 " values (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)";
         jdbcTemplate.batchUpdate(sql, data);
     }
 
+    /**
+     * MOBILE_COMPLAINT_DETAILS_DAY 河北_CEM移网质量投诉明细 数据入库
+     */
     public void batchInsertMobileComplaintDay(List<Object[]> data) {
-        String sql = "insert into tsl_data.mobile_complaint_day (month_id,day_id,acct_date,sheet_no,is_online_complete,contact_no,busi_no,serv_content,last_deal_content," +
-                "deal_depart_name,deal_opinion,serv_type,bus_type,duty_reason,accept_channel,submit_channel,compl_area_local,duty_major,product_name,sp_product_code," +
-                "pre_repair_name,pre_repair_charges,fault_location,cust_level,satisfaction_in_reply,is_ok_in_reply,accept_time,end_time,proce_time,cust_area,is_cust_serv_complete," +
-                "is_send_sheet_complete,is_repeat,is_upgrade,is_timeout,gis_city,process_nums,deal_depart_name_1,deal_depart_name_2,deal_depart_name_3,first_call_back_time," +
-                "proce_remark,duty_major_day,duty_reason_id_day,duty_major_month,duty_reason_id_month,voice_text)" +
+        String sql = "insert into tsl_data.mobile_complaint_day (month_id,day_id,acct_date,sheet_no," +
+                " is_online_complete,contact_no,busi_no,serv_content,last_deal_content,deal_depart_name," +
+                " deal_opinion,serv_type,bus_type,duty_reason,accept_channel,submit_channel,compl_area_local," +
+                " duty_major,product_name,sp_product_code,pre_repair_name,pre_repair_charges,fault_location," +
+                " cust_level,satisfaction_in_reply,is_ok_in_reply,accept_time,end_time,proce_time,cust_area," +
+                " is_cust_serv_complete,is_send_sheet_complete,is_repeat,is_upgrade,is_timeout,gis_city," +
+                " process_nums,deal_depart_name_1,deal_depart_name_2,deal_depart_name_3,first_call_back_time," +
+                " proce_remark,duty_major_day,duty_reason_id_day,duty_major_month,duty_reason_id_month,voice_text)" +
                 " values (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)";
         jdbcTemplate.batchUpdate(sql, data);
     }
+
+    /**
+     * COMPLAINT_DETAILS_FIX_YWD_DAY 河北客户体验管理智能定责投诉明细月累计接口日 数据入库
+     */
+    public void batchInsertComplaintDetailsFixYwdDay(List<Object[]> data) {
+        String sql = "INSERT INTO tsl_data.complaint_details_fix_ywd_day " +
+                "(month_id, day_id, month_id1, day_id1, sheet_no, is_online_complete, contact_no, busi_no," +
+                " serv_content, last_deal_content, deal_depart_name, deal_user, deal_opinion, complete_user_code," +
+                " serv_type, bus_type, duty_reason, problem_duty, problem_solving_con, code_urgent_level," +
+                " code_important_type, cust_name, accept_user, cust_level, accept_channel, submit_channel," +
+                " compl_area_local, compl_city_local, compl_grid_local, cust_province, accept_time, end_time," +
+                " cust_area, duty_major, is_call_center_complete, is_cust_serv_complete, is_send_sheet_complete," +
+                " sp_code, sp_name, is_repeat, is_timeout, duty_dept, proce_user_code, proce_time," +
+                " proce_depart_name, proce_remark, satisfaction, is_ok, is_ok_in_reply, product_name," +
+                " pre_repair_name, pre_repair_charges, actual_total_len, timeout_len, timeout_depart_name," +
+                " sp_product_code, sp_product_name, process_nums, deal_depart_name_1, deal_user_1, sended_user_1," +
+                " deal_depart_name_2, deal_user_2, sended_user_2, deal_depart_name_3, deal_user_3, sended_user_3," +
+                " deal_depart_name_end, deal_user_end, sended_depart_name_last, first_call_back_time," +
+                " duty_reason_id_day, duty_major_day, duty_reason_id_month, duty_major_month, voice_text," +
+                " fault_location, satisfaction_in_reply, is_upgrade, gis_city, data_source, code_answer_way," +
+                " success_call_back, last_satisfaction, last_is_ok, design_profess, deal_unit, province_deal_unit," +
+                " if_direct, is_proself, timely_contact, is_repeat_5times_m, is_repeat_2times_m, is_repeat_2times_2m," +
+                " is_repeat_2times_3m, create_time)" +
+                "VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?," +
+                " ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?," +
+                " ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, now());";
+        jdbcTemplate.batchUpdate(sql, data);
+    }
 }

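A minimal caller sketch for the new DAO method (not part of the commit; in the real flow the rows are built in DataWarehouseService from the parsed CSV records). Each Object[] must carry exactly 95 values, matching the placeholder count configured as filedNumOfComplaintDetailsFixYwdDay; the 96th column, create_time, is filled by now() inside the SQL itself.

import java.util.ArrayList;
import java.util.List;

public class ComplaintDetailsInsertSketch {
    // Illustrative only: shows the row shape expected by batchInsertComplaintDetailsFixYwdDay.
    public static void insertSample(com.nokia.tsl_data.dao.TslDataDao tslDataDao) {
        List<Object[]> rows = new ArrayList<>();
        Object[] row = new Object[95];   // one value per "?" placeholder
        row[0] = "202312";               // month_id (example value)
        row[1] = "20231224";             // day_id (example value)
        // ... remaining fields follow the column order of the INSERT statement; unset slots stay null
        rows.add(row);
        tslDataDao.batchInsertComplaintDetailsFixYwdDay(rows);
    }
}
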
+ 2 - 1
src/main/java/com/nokia/tsl_data/entity/TaskRecord.java

@@ -1,7 +1,8 @@
 package com.nokia.tsl_data.entity;
 
 import com.alibaba.fastjson2.JSONObject;
-import com.nokia.common.spring.jpa.converter.JSONObjectConverter;
+import com.nokia.tsl_data.entity.converter.JSONObjectConverter;
+
 import lombok.Data;
 import org.springframework.data.annotation.CreatedDate;
 import org.springframework.data.annotation.LastModifiedDate;

+ 1 - 1
src/main/java/com/nokia/common/spring/jpa/converter/JSONObjectConverter.java → src/main/java/com/nokia/tsl_data/entity/converter/JSONObjectConverter.java

@@ -1,4 +1,4 @@
-package com.nokia.common.spring.jpa.converter;
+package com.nokia.tsl_data.entity.converter;
 
 import com.alibaba.fastjson2.JSONObject;
 

+ 1 - 1
src/main/java/com/nokia/common/http/vo/R.java → src/main/java/com/nokia/tsl_data/entity/vo/R.java

@@ -1,4 +1,4 @@
-package com.nokia.common.http.vo;
+package com.nokia.tsl_data.entity.vo;
 
 import lombok.Data;
 

+ 7 - 0
src/main/java/com/nokia/tsl_data/properties/DataWarehouseProperties.java

@@ -36,4 +36,11 @@ public class DataWarehouseProperties {
     private String prefixOfMobileComplaint = "HE_D_MOBILE_COMPLAINT_DETAILS_DAY_1087468015013851136_";
     // HighQualityCountDay 字段数量
     private Integer filedNumOfMobileComplaintDay = 47;
+
+    /**
+     * 河北客户体验管理智能定责投诉明细月累计接口日
+     */
+    private String dirOfComplaintDetailsFixYwdDay = "/data/nenglishangdian/complaint_details_fix_ywd_day/";
+    private String prefixOfComplaintDetailsFixYwdDay = "HE_D_COMPLAINT_DETAILS_FIX_YWD_DAY_1186327221372567552_";
+    private Integer filedNumOfComplaintDetailsFixYwdDay = 95;
 }

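For orientation, the three new defaults combine into the daily file name that getComplaintDetailsFixYwdDayFile(day) in DataWarehouseService resolves; the sketch below uses an illustrative account day.

// Illustrative only: how the new property defaults resolve to a concrete daily input file.
public class ComplaintDetailsPathSketch {
    public static void main(String[] args) {
        String day = "20231224"; // example account day
        String fileName = "HE_D_COMPLAINT_DETAILS_FIX_YWD_DAY_1186327221372567552_" + day + ".csv";
        java.io.File file = java.nio.file.Paths.get(
                "/data/nenglishangdian/complaint_details_fix_ywd_day/", fileName).toFile();
        System.out.println(file.getAbsolutePath());
        // -> /data/nenglishangdian/complaint_details_fix_ywd_day/
        //    HE_D_COMPLAINT_DETAILS_FIX_YWD_DAY_1186327221372567552_20231224.csv
    }
}
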
+ 1 - 1
src/main/java/com/nokia/tsl_data/scheduling/controller/RegisteredTaskController.java

@@ -1,7 +1,7 @@
 package com.nokia.tsl_data.scheduling.controller;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.nokia.common.http.vo.R;
+import com.nokia.tsl_data.entity.vo.R;
 import com.nokia.tsl_data.scheduling.entity.RegisteredTask;
 import com.nokia.tsl_data.scheduling.service.RegisteredTaskService;
 

+ 1 - 1
src/main/java/com/nokia/tsl_data/scheduling/controller/ScheduledTaskController.java

@@ -1,7 +1,7 @@
 package com.nokia.tsl_data.scheduling.controller;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.nokia.common.http.vo.R;
+import com.nokia.tsl_data.entity.vo.R;
 import com.nokia.tsl_data.scheduling.entity.ScheduledTask;
 import com.nokia.tsl_data.scheduling.service.SchedulingService;
 

+ 1 - 1
src/main/java/com/nokia/tsl_data/scheduling/service/RegisteredTaskService.java

@@ -1,8 +1,8 @@
 package com.nokia.tsl_data.scheduling.service;
 
-import com.nokia.common.dao.SnowFlakeUtil;
 import com.nokia.tsl_data.scheduling.dao.RegisteredTaskMapper;
 import com.nokia.tsl_data.scheduling.entity.RegisteredTask;
+import com.nokia.tsl_data.util.SnowFlakeUtil;
 
 import org.springframework.context.ApplicationContext;
 import org.springframework.stereotype.Service;

+ 1 - 1
src/main/java/com/nokia/tsl_data/scheduling/service/SchedulingService.java

@@ -1,9 +1,9 @@
 package com.nokia.tsl_data.scheduling.service;
 
-import com.nokia.common.dao.SnowFlakeUtil;
 import com.nokia.tsl_data.scheduling.dao.ScheduledTaskMapper;
 import com.nokia.tsl_data.scheduling.entity.RegisteredTask;
 import com.nokia.tsl_data.scheduling.entity.ScheduledTask;
+import com.nokia.tsl_data.util.SnowFlakeUtil;
 
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.stereotype.Service;

+ 98 - 39
src/main/java/com/nokia/tsl_data/service/DataWarehouseService.java

@@ -1,9 +1,10 @@
 package com.nokia.tsl_data.service;
 
-import com.nokia.common.codec.MD5Util;
-import com.nokia.common.io.TextUtil;
 import com.nokia.tsl_data.dao.*;
 import com.nokia.tsl_data.properties.DataWarehouseProperties;
+import com.nokia.tsl_data.util.CodecUtil;
+import com.nokia.tsl_data.util.TextUtil;
+
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.csv.CSVFormat;
 import org.apache.commons.csv.CSVParser;
@@ -31,7 +32,10 @@ public class DataWarehouseService {
     private final SysDataDictionaryRepository sysDataDictionaryRepository;
     private final DataWarehouseProperties dataWarehouseProperties;
 
-    public DataWarehouseService(TslDataDao tslDataDao, MobileComplaintMapper mobileComplaintMapper, HighQualityCountMapper highQualityCountMapper, HighQualityListDayMapper highQualityListDayMapper, SysDataDictionaryRepository sysDataDictionaryRepository, DataWarehouseProperties dataWarehouseProperties, MessageService messageService) {
+    public DataWarehouseService(TslDataDao tslDataDao, MobileComplaintMapper mobileComplaintMapper,
+            HighQualityCountMapper highQualityCountMapper, HighQualityListDayMapper highQualityListDayMapper,
+            SysDataDictionaryRepository sysDataDictionaryRepository, DataWarehouseProperties dataWarehouseProperties,
+            MessageService messageService) {
         this.tslDataDao = tslDataDao;
         this.mobileComplaintMapper = mobileComplaintMapper;
         this.highQualityCountMapper = highQualityCountMapper;
@@ -47,14 +51,14 @@ public class DataWarehouseService {
         File mobileComplaintDayFile = getMobileComplaintDayFile(day);
         if (!mobileComplaintDayFile.exists()) {
             stringBuffer.append("河北_CEM移网质量投诉明细_HE_D_MOBILE_COMP表账期 ").append(day).append(" 数据未到达");
-        }else if (!checkMD5(mobileComplaintDayFile)) {
+        } else if (!checkMD5(mobileComplaintDayFile)) {
             stringBuffer.append("河北_CEM移网质量投诉明细_HE_D_MOBILE_COMP表账期 ").append(day).append(" MD5验证未通过");
         }
         // 河北_CEM高品质2日明细
         File highQualityListDayFile = getHighQualityListDayFile(day);
         if (!highQualityListDayFile.exists()) {
             stringBuffer.append("河北_CEM高品质2日明细_HE_D_HIGH_QUALITY_LIST表账期 ").append(day).append(" 数据未到达");
-        }else if (!checkMD5(highQualityListDayFile)) {
+        } else if (!checkMD5(highQualityListDayFile)) {
             stringBuffer.append("河北_CEM高品质2日明细_HE_D_HIGH_QUALITY_LIST表账期 ").append(day).append(" MD5验证未通过");
         }
         // 河北_CEM高品质2日统计
@@ -67,6 +71,9 @@ public class DataWarehouseService {
         return stringBuffer.toString();
     }
 
+    /**
+     * 入库全部文件的任务
+     */
     public void wareHouse(String day) {
         int count;
         if ((count = highQualityCountMapper.countForDay(day)) == 0) {
@@ -86,46 +93,57 @@ public class DataWarehouseService {
         }
     }
 
-    private File getMobileComplaintDayFile(String day) {
-        String fileName = dataWarehouseProperties.getPrefixOfMobileComplaint() + day + ".csv";
-        return Paths.get(dataWarehouseProperties.getDirOfMobileComplaint(), fileName).toFile();
-    }
-
-    private File getHighQualityListDayFile(String day) {
-        String fileName = dataWarehouseProperties.getPrefixOfHighQualityListDay() + day + ".csv";
-        return Paths.get(dataWarehouseProperties.getDirOfHighQualityListDay(), fileName).toFile();
+    /**
+     * 删除 河北_CEM移网质量投诉明细 数据
+     */
+    public int deleteMobileComplaintDay(String day) {
+        return mobileComplaintMapper.deleteMobileCompForDay(day);
     }
 
-    private File getHighQualityCountDayFile(String day) {
-        String fileName = dataWarehouseProperties.getPrefixOfHighQualityCountDay() + day + ".csv";
-        return Paths.get(dataWarehouseProperties.getDirOfHighQualityCountDay(), fileName).toFile();
+    /**
+     * 删除 河北_CEM高品质2日统计 数据
+     */
+    public int deleteHighQualityCountDay(String day) {
+        return highQualityCountMapper.deleteHighQualityCountForDay(day);
     }
 
     /**
-     * 验证文件的MD5
+     * 删除 河北_CEM高质量2日明细 数据
      */
-    private boolean checkMD5(File file) {
-        String md5String = TextUtil.readLinesWithUTF8(file.getAbsolutePath() + ".MD5").get(0);
-        String md5OfFile = MD5Util.MD5OfFile(file);
-        return md5String.equals(md5OfFile);
+    public int deleteHighQualityListDay(String day) {
+        return highQualityListDayMapper.deleteHighQualityListForDay(day);
     }
 
+    /**
+     * 入库 河北高质量2日明细数据
+     */
     public void warehouseHighQualityListDay(String day) {
         warehouseHighQualityListDay(getHighQualityListDayFile(day));
     }
 
+    /**
+     * 入库 河北_CEM移网质量投诉明细
+     */
     public void warehouseMobileComplaintDay(String day) {
         warehouseMobileComplaintDay(getMobileComplaintDayFile(day));
     }
 
+    /**
+     * 入库 河北_CEM高品质2日统计
+     */
     public void warehouseHighQualityCountDay(String day) {
         warehouseHighQualityCountDay(getHighQualityCountDayFile(day));
     }
 
     /**
-     * 入库 河北高质量2日明细数据
+     * 入库 河北客户体验管理智能定责投诉明细月累计接口日 数据
      */
-    public void warehouseHighQualityListDay(File file) {
+    public void warehouseComplaintDetailsFixYwdDay(String day) {
+        warehouseComplaintDetailsFixYwdDay(getComplaintDetailsFixYwdDayFile(day));
+    }
+
+    private void warehouseHighQualityListDay(File file) {
+        // 读取文件
         try (Reader reader = new InputStreamReader(new FileInputStream(file), StandardCharsets.UTF_8)) {
             // SOH作为分割符
             char delimiter = 1;
@@ -150,16 +168,14 @@ public class DataWarehouseService {
                 }
             }
             tslDataDao.batchInsertHighQualityListDay(list);
+            log.debug("河北_CEM高质量2日明细 数据入库成功...");
         } catch (IOException e) {
             e.printStackTrace();
-            throw new RuntimeException("河北_CEM高质量2日明细数据入库失败..." + e.getMessage());
+            throw new RuntimeException("河北_CEM高质量2日明细 数据入库失败..." + e.getMessage());
         }
     }
 
-    /**
-     * 入库 河北_CEM移网质量投诉明细
-     */
-    public void warehouseMobileComplaintDay(File file) {
+    private void warehouseMobileComplaintDay(File file) {
         try (Reader reader = new InputStreamReader(new FileInputStream(file), StandardCharsets.UTF_8)) {
             // SOH作为分割符
             char delimiter = 1;
@@ -179,16 +195,14 @@ public class DataWarehouseService {
                 }
             }
             tslDataDao.batchInsertMobileComplaintDay(list);
+            log.debug("河北_CEM移网质量投诉明细 数据入库成功...");
         } catch (IOException e) {
             e.printStackTrace();
-            throw new RuntimeException("河北_CEM移网质量投诉明细数据入库失败..." + e.getMessage());
+            throw new RuntimeException("河北_CEM移网质量投诉明细 数据入库失败..." + e.getMessage());
         }
     }
 
-    /**
-     * 入库 河北_CEM高品质2日统计
-     */
-    public void warehouseHighQualityCountDay(File file) {
+    private void warehouseHighQualityCountDay(File file) {
         try (Reader reader = new InputStreamReader(new FileInputStream(file), StandardCharsets.UTF_8)) {
             // SOH作为分割符
             char delimiter = 1;
@@ -208,21 +222,66 @@ public class DataWarehouseService {
                 }
             }
             tslDataDao.batchInsertHighQualityCountDay(list);
+            log.debug("河北_CEM高品质2日统计 数据入库成功...");
         } catch (IOException e) {
             e.printStackTrace();
             throw new RuntimeException("河北_CEM高品质2日统计数据入库失败..." + e.getMessage());
         }
     }
 
-    public int deleteMobileComplaintDay(String day) {
-        return mobileComplaintMapper.deleteMobileCompForDay(day);
+    private void warehouseComplaintDetailsFixYwdDay(File file) {
+        try (Reader reader = new InputStreamReader(new FileInputStream(file), StandardCharsets.UTF_8)) {
+            // SOH作为分割符
+            char delimiter = 1;
+            CSVParser parser = CSVFormat.DEFAULT.builder()
+                    .setRecordSeparator("\n")
+                    .setQuote(null)
+                    .setDelimiter(delimiter)
+                    .setSkipHeaderRecord(false)
+                    .build().parse(reader);
+            List<CSVRecord> records = parser.getRecords();
+            List<Object[]> list = new ArrayList<>();
+            for (CSVRecord record : records) {
+                Object[] ps = new Object[dataWarehouseProperties.getFiledNumOfComplaintDetailsFixYwdDay().intValue()];
+                list.add(ps);
+                for (int i = 0; i < ps.length; i++) {
+                    ps[i] = record.get(i);
+                }
+            }
+            tslDataDao.batchInsertComplaintDetailsFixYwdDay(list);
+            log.debug("河北客户体验管理智能定责投诉明细月累计接口日 数据入库成功...");
+        } catch (IOException e) {
+            e.printStackTrace();
+            throw new RuntimeException("河北客户体验管理智能定责投诉明细月累计接口日 数据入库失败..." + e.getMessage());
+        }
     }
 
-    public int deleteHighQualityCountDay(String day) {
-        return highQualityCountMapper.deleteHighQualityCountForDay(day);
+    private File getMobileComplaintDayFile(String day) {
+        String fileName = dataWarehouseProperties.getPrefixOfMobileComplaint() + day + ".csv";
+        return Paths.get(dataWarehouseProperties.getDirOfMobileComplaint(), fileName).toFile();
     }
 
-    public int deleteHighQualityListDay(String day) {
-        return highQualityListDayMapper.deleteHighQualityListForDay(day);
+    private File getHighQualityListDayFile(String day) {
+        String fileName = dataWarehouseProperties.getPrefixOfHighQualityListDay() + day + ".csv";
+        return Paths.get(dataWarehouseProperties.getDirOfHighQualityListDay(), fileName).toFile();
+    }
+
+    private File getHighQualityCountDayFile(String day) {
+        String fileName = dataWarehouseProperties.getPrefixOfHighQualityCountDay() + day + ".csv";
+        return Paths.get(dataWarehouseProperties.getDirOfHighQualityCountDay(), fileName).toFile();
+    }
+
+    private File getComplaintDetailsFixYwdDayFile(String day) {
+        String fileName = dataWarehouseProperties.getPrefixOfComplaintDetailsFixYwdDay() + day + ".csv";
+        return Paths.get(dataWarehouseProperties.getDirOfComplaintDetailsFixYwdDay(), fileName).toFile();
+    }
+
+    /**
+     * 验证文件的MD5
+     */
+    private boolean checkMD5(File file) {
+        String md5String = TextUtil.readLinesWithUTF8(file.getAbsolutePath() + ".MD5").get(0);
+        String md5OfFile = CodecUtil.MD5OfFile(file);
+        return md5String.equals(md5OfFile);
     }
 }

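All four warehouse* methods share one parsing convention: the feed is a headerless, unquoted text file read as UTF-8 with the SOH control character (0x01) as field delimiter and '\n' as record separator. A self-contained sketch of that convention with commons-csv follows; the file name is illustrative. Disabling the quote character means only the SOH byte is treated specially, which is presumably why the feed uses 0x01 rather than a comma.

import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;
import java.util.List;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;

public class SohCsvParseSketch {
    public static void main(String[] args) throws Exception {
        char delimiter = 1; // SOH (0x01)
        try (Reader reader = new InputStreamReader(
                new FileInputStream("sample_feed.csv"), StandardCharsets.UTF_8);
             CSVParser parser = CSVFormat.DEFAULT.builder()
                     .setRecordSeparator("\n")
                     .setQuote(null)
                     .setDelimiter(delimiter)
                     .setSkipHeaderRecord(false)
                     .build().parse(reader)) {
            List<CSVRecord> records = parser.getRecords();
            for (CSVRecord record : records) {
                System.out.println(record.size() + " fields, first = " + record.get(0));
            }
        }
    }
}
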
+ 2 - 2
src/main/java/com/nokia/tsl_data/service/TaskService.java

@@ -2,8 +2,6 @@ package com.nokia.tsl_data.service;
 
 import com.alibaba.fastjson2.JSONArray;
 import com.alibaba.fastjson2.JSONObject;
-import com.nokia.common.basic.DateUtil;
-import com.nokia.common.basic.InstantUtil;
 import com.nokia.tsl_data.dao.TaskRecordRepository;
 import com.nokia.tsl_data.dao.TslDataDao;
 import com.nokia.tsl_data.entity.TaskRecord;
@@ -12,6 +10,8 @@ import com.nokia.tsl_data.scheduling.entity._enum.ScheduledStatus;
 import com.nokia.tsl_data.scheduling.entity._enum.ScheduledType;
 import com.nokia.tsl_data.scheduling.entity.pojo.ScheduledParameter;
 import com.nokia.tsl_data.scheduling.service.SchedulingService;
+import com.nokia.tsl_data.util.DateUtil;
+import com.nokia.tsl_data.util.InstantUtil;
 
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.stereotype.Service;

+ 1 - 0
src/main/java/com/nokia/tsl_data/service/TslDataService.java

@@ -3,6 +3,7 @@ package com.nokia.tsl_data.service;
 import com.nokia.tsl_data.dao.*;
 import com.nokia.tsl_data.entity.TargetTsRatio;
 import com.nokia.tsl_data.properties.CustomerRateTargetProperties;
+
 import org.springframework.stereotype.Service;
 
 import java.text.ParseException;

+ 10 - 7
src/main/java/com/nokia/tsl_data/service/TslReportService.java

@@ -1,11 +1,12 @@
 package com.nokia.tsl_data.service;
 
-import com.nokia.common.io.excel.entity.CellRect;
-import com.nokia.common.io.excel.poi.PoiUtil;
 import com.nokia.tsl_data.dao.HighQualityCountMapper;
 import com.nokia.tsl_data.dao.MobileComplaintMapper;
 import com.nokia.tsl_data.dao.SysDataDictionaryRepository;
 import com.nokia.tsl_data.properties.OutputProperties;
+import com.nokia.tsl_data.util.excel.entity.CellRect;
+import com.nokia.tsl_data.util.excel.poi.PoiUtil;
+
 import lombok.extern.slf4j.Slf4j;
 import org.apache.poi.EncryptedDocumentException;
 import org.apache.poi.ss.usermodel.*;
@@ -68,7 +69,8 @@ public class TslReportService {
      */
     public void screenShotV2(String day) {
         String fileName = outputProperties.getOutputFileNamePrefix() + day + ".xlsx";
-        File file = Paths.get(outputProperties.getOutputPath(), day, fileName).toFile();
+        // 输出文件路径 增加V2
+        File file = Paths.get(outputProperties.getOutputPath(), "V2", day, fileName).toFile();
         if (!file.exists()) {
             throw new RuntimeException(String.format("无法截图,文件%s不存在", file.getAbsolutePath()));
         }
@@ -128,7 +130,8 @@ public class TslReportService {
      */
     public void generateReportV2(String day) {
         String fileName = outputProperties.getOutputFileNamePrefix() + day + ".xlsx";
-        File file = Paths.get(outputProperties.getOutputPath(), day).toFile();
+        // 输出路径 增加 V2
+        File file = Paths.get(outputProperties.getOutputPath(), "V2", day).toFile();
         if (!file.exists()) {
             boolean mkdirs = file.mkdirs();
             System.out.println(mkdirs);
@@ -157,7 +160,7 @@ public class TslReportService {
         getCityThreeRateSheet(day);
         // 区县三率
         getSheet7(day);
-        try (OutputStream outputStream = new FileOutputStream(fileName)) {
+        try (OutputStream outputStream = new FileOutputStream(Paths.get(file.getAbsolutePath(), fileName).toFile())) {
             workbook.write(outputStream);
             workbook.close();
             workbook = null;
@@ -169,7 +172,7 @@ public class TslReportService {
 
     public void generateReportV1(String day) {
         String fileName = outputProperties.getOutputFileNamePrefix() + day + ".xlsx";
-        File file = Paths.get(outputProperties.getOutputPath(), day).toFile();
+        File file = Paths.get(outputProperties.getOutputPath(), "V1", day).toFile();
         if (!file.exists()) {
             boolean mkdirs = file.mkdirs();
             System.out.println(mkdirs);
@@ -194,7 +197,7 @@ public class TslReportService {
         getSheet3(day);
         // 客户端-投诉问题解决满意度 客户端-投诉问题解决率 客户端-投诉问题响应率
         getSheet4_6(day);
-        try (OutputStream outputStream = new FileOutputStream(fileName)) {
+        try (OutputStream outputStream = new FileOutputStream(Paths.get(file.getAbsolutePath(), fileName).toFile())) {
             workbook.write(outputStream);
             workbook.close();
             workbook = null;

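Two things change in TslReportService: the V1/V2 reports now write into separate subdirectories, and the workbook is written into that directory instead of to a bare relative file name (previously new FileOutputStream(fileName) landed in the process working directory). With hypothetical values for outputPath and outputFileNamePrefix, the V2 target resolves as sketched below.

// Hypothetical property values for illustration: outputPath=/data/output, outputFileNamePrefix=report_
public class ReportOutputPathSketch {
    public static void main(String[] args) {
        java.io.File dir = java.nio.file.Paths.get("/data/output", "V2", "20231224").toFile();
        java.io.File target = java.nio.file.Paths.get(dir.getAbsolutePath(), "report_20231224.xlsx").toFile();
        System.out.println(target); // -> /data/output/V2/20231224/report_20231224.xlsx (dir created via mkdirs() when missing)
    }
}
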
+ 13 - 30
src/main/java/com/nokia/common/codec/MD5Util.java → src/main/java/com/nokia/tsl_data/util/CodecUtil.java

@@ -1,39 +1,19 @@
-package com.nokia.common.codec;
+package com.nokia.tsl_data.util;
 
-import org.springframework.util.DigestUtils;
-
-import java.io.*;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
 import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 
-public class MD5Util {
-
-    public static String encrypt(String str) {
-        return encrypt(str, "utf8");
-    }
-
-    public static String encrypt(String str, String charset) {
-        try {
-            return DigestUtils.md5DigestAsHex(str.getBytes(charset));
-        } catch (UnsupportedEncodingException e) {
-            e.printStackTrace();
-            throw new RuntimeException("使用指定的编码 " + charset + " 解码失败: " + e.getMessage());
-        }
-    }
-
-    public static String encode(String str, String charset) {
-        return encrypt(str, charset);
-    }
-
-    public static String encode(String str) {
-        return encode(str, "utf8");
-    }
+public class CodecUtil {
 
     /**
-     * 返回路径对应的文件的md5
+     * 返回 路径对应的文件 的 md5
      */
     public static String MD5OfFile(String filePath) {
-        try (InputStream inputStream = new FileInputStream(filePath)){
+        try (InputStream inputStream = new FileInputStream(filePath)) {
             return MD5OfInputStream(inputStream);
         } catch (IOException e) {
             e.printStackTrace();
@@ -42,10 +22,10 @@ public class MD5Util {
     }
 
     /**
-     * 返回文件的md5
+     * 返回 文件 的 md5
      */
     public static String MD5OfFile(File file) {
-        try (InputStream inputStream = new FileInputStream(file)){
+        try (InputStream inputStream = new FileInputStream(file)) {
             return MD5OfInputStream(inputStream);
         } catch (IOException e) {
             e.printStackTrace();
@@ -53,6 +33,9 @@ public class MD5Util {
         }
     }
 
+    /**
+     * 返回 InputStream 的 md5
+     */
     public static String MD5OfInputStream(InputStream inputStream) {
         try {
             MessageDigest messageDigest = MessageDigest.getInstance("MD5");

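With the helpers now under com.nokia.tsl_data.util, the checksum convention used by DataWarehouseService.checkMD5 can be reproduced stand-alone: each <name>.csv ships with a one-line <name>.csv.MD5 companion whose content must equal the value CodecUtil.MD5OfFile computes for the data file. The path in the sketch is illustrative.

import java.io.File;

import com.nokia.tsl_data.util.CodecUtil;
import com.nokia.tsl_data.util.TextUtil;

public class Md5CheckSketch {
    // Returns true when the companion .MD5 file matches the digest computed from the data file.
    public static boolean verify(String csvPath) {
        File csv = new File(csvPath);
        String expected = TextUtil.readLinesWithUTF8(csv.getAbsolutePath() + ".MD5").get(0);
        String actual = CodecUtil.MD5OfFile(csv);
        return expected.equals(actual);
    }

    public static void main(String[] args) {
        System.out.println(verify("/data/sample_feed.csv")); // illustrative path
    }
}
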
+ 1 - 1
src/main/java/com/nokia/common/basic/DateUtil.java → src/main/java/com/nokia/tsl_data/util/DateUtil.java

@@ -1,4 +1,4 @@
-package com.nokia.common.basic;
+package com.nokia.tsl_data.util;
 
 import java.text.DateFormat;
 import java.text.ParseException;

+ 1 - 1
src/main/java/com/nokia/common/basic/InstantUtil.java → src/main/java/com/nokia/tsl_data/util/InstantUtil.java

@@ -1,4 +1,4 @@
-package com.nokia.common.basic;
+package com.nokia.tsl_data.util;
 
 import java.time.ZoneId;
 import java.time.format.DateTimeFormatter;

+ 1 - 1
src/main/java/com/nokia/common/dao/SnowFlakeUtil.java → src/main/java/com/nokia/tsl_data/util/SnowFlakeUtil.java

@@ -1,4 +1,4 @@
-package com.nokia.common.dao;
+package com.nokia.tsl_data.util;
 
 import java.util.Date;
 

+ 1 - 1
src/main/java/com/nokia/common/io/TextUtil.java → src/main/java/com/nokia/tsl_data/util/TextUtil.java

@@ -1,4 +1,4 @@
-package com.nokia.common.io;
+package com.nokia.tsl_data.util;
 
 import org.springframework.util.StringUtils;
 

+ 1 - 1
src/main/java/com/nokia/common/io/excel/entity/AlignmentEnum.java → src/main/java/com/nokia/tsl_data/util/excel/entity/AlignmentEnum.java

@@ -1,4 +1,4 @@
-package com.nokia.common.io.excel.entity;
+package com.nokia.tsl_data.util.excel.entity;
 
 public enum AlignmentEnum {
     CENTER, LEFT, RIGHT

+ 1 - 1
src/main/java/com/nokia/common/io/excel/entity/CellInfo.java → src/main/java/com/nokia/tsl_data/util/excel/entity/CellInfo.java

@@ -1,4 +1,4 @@
-package com.nokia.common.io.excel.entity;
+package com.nokia.tsl_data.util.excel.entity;
 
 import lombok.Data;
 

+ 1 - 1
src/main/java/com/nokia/common/io/excel/entity/CellRect.java → src/main/java/com/nokia/tsl_data/util/excel/entity/CellRect.java

@@ -1,4 +1,4 @@
-package com.nokia.common.io.excel.entity;
+package com.nokia.tsl_data.util.excel.entity;
 
 import lombok.Data;
 

+ 1 - 1
src/main/java/com/nokia/common/io/excel/entity/Gradient.java → src/main/java/com/nokia/tsl_data/util/excel/entity/Gradient.java

@@ -1,4 +1,4 @@
-package com.nokia.common.io.excel.entity;
+package com.nokia.tsl_data.util.excel.entity;
 
 import java.awt.Color;
 

+ 1 - 1
src/main/java/com/nokia/common/io/excel/entity/ThreeColorGradient.java → src/main/java/com/nokia/tsl_data/util/excel/entity/ThreeColorGradient.java

@@ -1,4 +1,4 @@
-package com.nokia.common.io.excel.entity;
+package com.nokia.tsl_data.util.excel.entity;
 
 import lombok.Data;
 

+ 1 - 1
src/main/java/com/nokia/common/io/excel/entity/TwoColorGradient.java → src/main/java/com/nokia/tsl_data/util/excel/entity/TwoColorGradient.java

@@ -1,4 +1,4 @@
-package com.nokia.common.io.excel.entity;
+package com.nokia.tsl_data.util.excel.entity;
 
 import java.awt.Color;
 

+ 6 - 5
src/main/java/com/nokia/common/io/excel/poi/PoiUtil.java → src/main/java/com/nokia/tsl_data/util/excel/poi/PoiUtil.java

@@ -1,9 +1,5 @@
-package com.nokia.common.io.excel.poi;
+package com.nokia.tsl_data.util.excel.poi;
 
-import com.nokia.common.io.excel.entity.CellInfo;
-import com.nokia.common.io.excel.entity.CellRect;
-import com.nokia.common.io.excel.entity.Gradient;
-import com.nokia.common.io.excel.entity.ThreeColorGradient;
 import org.apache.poi.hssf.usermodel.HSSFFont;
 import org.apache.poi.hssf.usermodel.HSSFFormulaEvaluator;
 import org.apache.poi.hssf.usermodel.HSSFWorkbook;
@@ -17,6 +13,11 @@ import org.apache.poi.xssf.usermodel.XSSFFont;
 import org.apache.poi.xssf.usermodel.XSSFFormulaEvaluator;
 import org.apache.poi.xssf.usermodel.XSSFWorkbook;
 
+import com.nokia.tsl_data.util.excel.entity.CellInfo;
+import com.nokia.tsl_data.util.excel.entity.CellRect;
+import com.nokia.tsl_data.util.excel.entity.Gradient;
+import com.nokia.tsl_data.util.excel.entity.ThreeColorGradient;
+
 import java.awt.*;
 import java.awt.image.BufferedImage;
 import java.text.DateFormat;

+ 4 - 3
src/main/java/com/nokia/common/http/logging/RequestLogDispatcherServlet.java → src/main/java/com/nokia/tsl_data/util/logging/RequestLogDispatcherServlet.java

@@ -1,10 +1,11 @@
-package com.nokia.common.http.logging;
+package com.nokia.tsl_data.util.logging;
 
-import com.nokia.common.http.logging.entity.RepeatableHttpServletRequestWrapper;
-import com.nokia.common.http.logging.entity.RepeatableHttpServletResponseWrapper;
 import org.springframework.lang.Nullable;
 import org.springframework.web.servlet.DispatcherServlet;
 
+import com.nokia.tsl_data.util.logging.entity.RepeatableHttpServletRequestWrapper;
+import com.nokia.tsl_data.util.logging.entity.RepeatableHttpServletResponseWrapper;
+
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 

+ 3 - 2
src/main/java/com/nokia/common/http/logging/RequestLogHandlerInterceptor.java → src/main/java/com/nokia/tsl_data/util/logging/RequestLogHandlerInterceptor.java

@@ -1,6 +1,5 @@
-package com.nokia.common.http.logging;
+package com.nokia.tsl_data.util.logging;
 
-import com.nokia.common.http.logging.entity.RepeatableHttpServletResponseWrapper;
 import lombok.extern.slf4j.Slf4j;
 import org.slf4j.MDC;
 import org.springframework.lang.Nullable;
@@ -9,6 +8,8 @@ import org.springframework.util.StreamUtils;
 import org.springframework.util.StringUtils;
 import org.springframework.web.servlet.HandlerInterceptor;
 
+import com.nokia.tsl_data.util.logging.entity.RepeatableHttpServletResponseWrapper;
+
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 import java.nio.charset.Charset;

+ 1 - 1
src/main/java/com/nokia/common/http/logging/entity/RepeatableHttpServletRequestWrapper.java → src/main/java/com/nokia/tsl_data/util/logging/entity/RepeatableHttpServletRequestWrapper.java

@@ -1,4 +1,4 @@
-package com.nokia.common.http.logging.entity;
+package com.nokia.tsl_data.util.logging.entity;
 
 import java.io.BufferedReader;
 import java.io.ByteArrayInputStream;

+ 1 - 1
src/main/java/com/nokia/common/http/logging/entity/RepeatableHttpServletResponseWrapper.java → src/main/java/com/nokia/tsl_data/util/logging/entity/RepeatableHttpServletResponseWrapper.java

@@ -1,4 +1,4 @@
-package com.nokia.common.http.logging.entity;
+package com.nokia.tsl_data.util.logging.entity;
 
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;

+ 2 - 2
src/main/resources/application.yml

@@ -3,7 +3,7 @@ server:
 
 Spring:
   profiles:
-    active: pro
+    active: dev
   jpa:
     hibernate:
       ddl-auto: update
@@ -36,6 +36,6 @@ spring:
   profiles: dev
   datasource:
     driver-class-name: org.postgresql.Driver
-    url: jdbc:postgresql://localhost:5432/tsl_data
+    url: jdbc:postgresql://localhost:5432/postgres
     username: postgres
     password: fantuan1985

+ 3 - 44
src/test/java/com/nokia/tsl_data/TslDataApplicationTest.java

@@ -1,59 +1,18 @@
 package com.nokia.tsl_data;
 
-import com.nokia.tsl_data.scheduling.dao.ScheduledTaskMapper;
-import com.nokia.tsl_data.scheduling.entity.ScheduledTask;
-import com.nokia.tsl_data.service.TslReportService;
-import com.nokia.tsl_data.service.UserCountService;
+import com.nokia.tsl_data.service.DataWarehouseService;
 import org.junit.jupiter.api.Test;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.boot.test.context.SpringBootTest;
 
-import java.io.IOException;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.Files;
-import java.nio.file.Paths;
-import java.util.List;
-
 @SpringBootTest
 class TslDataApplicationTest {
 
     @Autowired
-    private UserCountService userCountService;
-
-    /**
-     * 更新管理端用户数--当前阶段需要修改一下模式
-     */
-    @Test
-    void test1() throws IOException {
-        String path = "D:/src/管理端用户数.txt";
-        Files.lines(Paths.get(path), StandardCharsets.UTF_8)
-                .forEach(line -> {
-                    String[] split = line.split("\t");
-                    System.out.println(split[0] + Double.parseDouble(split[2]));
-                    userCountService.updateManagementUserCount("202311", split[0], Double.parseDouble(split[2]));
-                });
-    }
-
-    /**
-     * 更新客户端用户数
-     */
-    @Test
-    void test2() throws IOException {
-        String path = "D:/src/客户端用户数.txt";
-        Files.lines(Paths.get(path), StandardCharsets.UTF_8)
-                .forEach(line -> {
-                    String[] split = line.split("\t");
-                    System.out.println(split[0] + Double.parseDouble(split[1]));
-                    userCountService.updateCustomerUserCount("202311", split[0], Double.parseDouble(split[1]));
-                });
-    }
-
-    @Autowired
-    private TslReportService tslReportService;
+    private DataWarehouseService dataWarehouseService;
 
     @Test
     void test() {
-        tslReportService.generateReportV1("20231223");
-        // tasks.forEach(System.out::println);
+        dataWarehouseService.warehouseComplaintDetailsFixYwdDay("20231224");
     }
 }