feat(ChatKnowledge): Support Financial Report Analysis (#1702)

Co-authored-by: hzh97 <2976151305@qq.com>
Co-authored-by: Fangyin Cheng <staneyffer@gmail.com>
Co-authored-by: licunxing <864255598@qq.com>
main
Aries-ckt 1 year ago committed by GitHub
parent 22e0680a6a
commit 167d972093
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
  1. 7
      .env.template
  2. 1
      assets/schema/dbgpt.sql
  3. 3
      assets/schema/upgrade/v0_5_10/upgrade_to_v0.5.10.sql
  4. 396
      assets/schema/upgrade/v0_5_10/v0.5.9.sql
  5. BIN
      assets/wechat.jpg
  6. 28
      dbgpt/_private/config.py
  7. 2
      dbgpt/_version.py
  8. 88
      dbgpt/app/knowledge/api.py
  9. 9
      dbgpt/app/knowledge/request/request.py
  10. 2
      dbgpt/app/knowledge/request/response.py
  11. 6
      dbgpt/app/knowledge/service.py
  12. 83
      dbgpt/app/openapi/api_v1/api_v1.py
  13. 10
      dbgpt/app/scene/base.py
  14. 5
      dbgpt/app/scene/chat_factory.py
  15. 0
      dbgpt/app/scene/chat_knowledge/extract_entity/__init__.py
  16. 29
      dbgpt/app/scene/chat_knowledge/extract_entity/chat.py
  17. 34
      dbgpt/app/scene/chat_knowledge/extract_entity/out_parser.py
  18. 41
      dbgpt/app/scene/chat_knowledge/extract_entity/prompt.py
  19. 0
      dbgpt/app/scene/chat_knowledge/extract_triplet/__init__.py
  20. 29
      dbgpt/app/scene/chat_knowledge/extract_triplet/chat.py
  21. 52
      dbgpt/app/scene/chat_knowledge/extract_triplet/out_parser.py
  22. 51
      dbgpt/app/scene/chat_knowledge/extract_triplet/prompt.py
  23. 4
      dbgpt/app/scene/chat_knowledge/v1/prompt.py
  24. 2
      dbgpt/app/static/404.html
  25. 2
      dbgpt/app/static/404/index.html
  26. 4
      dbgpt/app/static/_next/static/chunks/1009-5d81dfaf6e0efeb1.js
  27. 29
      dbgpt/app/static/_next/static/chunks/1182.2419d83f8d9ef6a3.js
  28. 2
      dbgpt/app/static/_next/static/chunks/1353-1dacbd59a5cf5fb8.js
  29. 6
      dbgpt/app/static/_next/static/chunks/1425-97f488f9d27648f7.js
  30. 2
      dbgpt/app/static/_next/static/chunks/1647-5c6bd87432337e74.js
  31. 0
      dbgpt/app/static/_next/static/chunks/193-b83823cd8ccb6a41.js
  32. 4
      dbgpt/app/static/_next/static/chunks/2057.e751dccfc814df3a.js
  33. 2
      dbgpt/app/static/_next/static/chunks/2185-6a46fbdf54a5364a.js
  34. 9
      dbgpt/app/static/_next/static/chunks/2282-96412afca1591c9a.js
  35. 9
      dbgpt/app/static/_next/static/chunks/2282-e90d1926eaaf3b3b.js
  36. 1
      dbgpt/app/static/_next/static/chunks/2453-26e8f6483c6e4575.js
  37. 2
      dbgpt/app/static/_next/static/chunks/2487-4522eeb3601ff54e.js
  38. 21
      dbgpt/app/static/_next/static/chunks/3378.94e0486b1540a391.js
  39. 4
      dbgpt/app/static/_next/static/chunks/3444-1911da618e1e8971.js
  40. 1
      dbgpt/app/static/_next/static/chunks/355a6ca7.668cb9af53ba68bc.js
  41. 1
      dbgpt/app/static/_next/static/chunks/355a6ca7.9ed0e7fb77828d28.js
  42. 2
      dbgpt/app/static/_next/static/chunks/3718-87572fc24f1c1cdf.js
  43. 2
      dbgpt/app/static/_next/static/chunks/411-3e1adedff6595f9e.js
  44. 1
      dbgpt/app/static/_next/static/chunks/4134.d59cf294103a4db2.js
  45. 1
      dbgpt/app/static/_next/static/chunks/4134.d80f15f013b13337.js
  46. 6
      dbgpt/app/static/_next/static/chunks/4553-2eeeec162e6b9d24.js
  47. 1
      dbgpt/app/static/_next/static/chunks/4835.0dd93e5756341d75.js
  48. 1
      dbgpt/app/static/_next/static/chunks/4835.da0dc28fd35c4aee.js
  49. 4
      dbgpt/app/static/_next/static/chunks/5237-f8ce62e2a793a23a.js
  50. 2
      dbgpt/app/static/_next/static/chunks/5503-f73cb46e78278f42.js
  51. 2
      dbgpt/app/static/_next/static/chunks/5510.28f6c94427988bd8.js
  52. 6
      dbgpt/app/static/_next/static/chunks/5733-ec2a588444393e17.js
  53. 2
      dbgpt/app/static/_next/static/chunks/5813-ba0135c147bac9a0.js
  54. 2
      dbgpt/app/static/_next/static/chunks/6165-48eaed9a80fbbd1b.js
  55. 72
      dbgpt/app/static/_next/static/chunks/6913.a9947645ef8eb4cb.js
  56. 81
      dbgpt/app/static/_next/static/chunks/7119-64fb8f0364433c24.js
  57. 81
      dbgpt/app/static/_next/static/chunks/7184-3ca3f58327a6986a.js
  58. 2
      dbgpt/app/static/_next/static/chunks/75fc9c18-1d6133135d3d283c.js
  59. 2
      dbgpt/app/static/_next/static/chunks/75fc9c18-f5c95b15762b1b2f.js
  60. 2
      dbgpt/app/static/_next/static/chunks/785-7baed2336ce7962c.js
  61. 1
      dbgpt/app/static/_next/static/chunks/7869-1a99e25b182b3eaa.js
  62. 2
      dbgpt/app/static/_next/static/chunks/8719.94582b395ce9745b.js
  63. 2
      dbgpt/app/static/_next/static/chunks/8928-0dd0f412ae0f4962.js
  64. 2
      dbgpt/app/static/_next/static/chunks/90912e1b-91352761084b91ff.js
  65. 4
      dbgpt/app/static/_next/static/chunks/9305-eb817abebcfffa20.js
  66. 9
      dbgpt/app/static/_next/static/chunks/9479-21f588e1fd4e6b6d.js
  67. 9
      dbgpt/app/static/_next/static/chunks/9479-8a6b32582d10ba1f.js
  68. 1
      dbgpt/app/static/_next/static/chunks/971df74e-084169c6c09eb1fe.js
  69. 1
      dbgpt/app/static/_next/static/chunks/971df74e-7436ff4085ebb785.js
  70. 0
      dbgpt/app/static/_next/static/chunks/9924-42c72dae9efe5ccd.js
  71. 0
      dbgpt/app/static/_next/static/chunks/framework-398a6286d7178304.js
  72. 0
      dbgpt/app/static/_next/static/chunks/main-0b93b9919f9e60f7.js
  73. 176
      dbgpt/app/static/_next/static/chunks/pages/_app-cb8689a6e6530490.js
  74. 175
      dbgpt/app/static/_next/static/chunks/pages/_app-d0edbdcb9ec8a0fc.js
  75. 2
      dbgpt/app/static/_next/static/chunks/pages/agent-baf4571810cdd046.js
  76. 2
      dbgpt/app/static/_next/static/chunks/pages/app-66a520069742bfcc.js
  77. 2
      dbgpt/app/static/_next/static/chunks/pages/chat-dc50c4cc53e5b3a0.js
  78. 2
      dbgpt/app/static/_next/static/chunks/pages/database-0428b7022de673a0.js
  79. 4
      dbgpt/app/static/_next/static/chunks/pages/flow-18c806ed1099f71f.js
  80. 2
      dbgpt/app/static/_next/static/chunks/pages/flow/canvas-e8ea0a0f7aee0e13.js
  81. 1
      dbgpt/app/static/_next/static/chunks/pages/index-0b2d61c1c6358f20.js
  82. 1
      dbgpt/app/static/_next/static/chunks/pages/index-217e7be2e89f3434.js
  83. 1
      dbgpt/app/static/_next/static/chunks/pages/knowledge-223d50e9531bd961.js
  84. 1
      dbgpt/app/static/_next/static/chunks/pages/knowledge-71bdd8ab0d1d3756.js
  85. 2
      dbgpt/app/static/_next/static/chunks/pages/knowledge/chunk-7326b8534d2f9172.js
  86. 0
      dbgpt/app/static/_next/static/chunks/pages/knowledge/graph-f0b2c9d145d2c446.js
  87. 2
      dbgpt/app/static/_next/static/chunks/pages/models-091bfc790579fe32.js
  88. 1
      dbgpt/app/static/_next/static/chunks/pages/prompt-c44ac718b4d637c9.js
  89. 1
      dbgpt/app/static/_next/static/chunks/pages/prompt-f0ec387782dbed90.js
  90. 1
      dbgpt/app/static/_next/static/chunks/webpack-30a89b441f28d1b6.js
  91. 1
      dbgpt/app/static/_next/static/chunks/webpack-4333f5e6702e31c0.js
  92. 1
      dbgpt/app/static/_next/static/css/a275cc2b185e04f8.css
  93. 1
      dbgpt/app/static/_next/static/css/b4846eed11c4725f.css
  94. 2
      dbgpt/app/static/_next/static/css/bfb55bd78210e323.css
  95. 1
      dbgpt/app/static/_next/static/kmKCVS0DEWcBwSlV6vbJo/_buildManifest.js
  96. 1
      dbgpt/app/static/_next/static/knRb7rLfqH1TVUFNWVC9a/_buildManifest.js
  97. 0
      dbgpt/app/static/_next/static/knRb7rLfqH1TVUFNWVC9a/_ssgManifest.js
  98. 2
      dbgpt/app/static/agent/index.html
  99. 2
      dbgpt/app/static/app/index.html
  100. 2
      dbgpt/app/static/chat/index.html
  101. Some files were not shown because too many files have changed in this diff Show More

@ -292,4 +292,9 @@ DBGPT_LOG_LEVEL=INFO
# OTEL_EXPORTER_OTLP_TRACES_CERTIFICATE=
# OTEL_EXPORTER_OTLP_TRACES_HEADERS=
# OTEL_EXPORTER_OTLP_TRACES_TIMEOUT=
# OTEL_EXPORTER_OTLP_TRACES_COMPRESSION=
#*******************************************************************#
#** FINANCIAL CHAT Config **#
#*******************************************************************#
# FIN_REPORT_MODEL=/app/models/bge-large-zh

@ -17,6 +17,7 @@ CREATE TABLE IF NOT EXISTS `knowledge_space`
`id` int NOT NULL AUTO_INCREMENT COMMENT 'auto increment id',
`name` varchar(100) NOT NULL COMMENT 'knowledge space name',
`vector_type` varchar(50) NOT NULL COMMENT 'vector type',
`domain_type` varchar(50) NOT NULL COMMENT 'domain type',
`desc` varchar(500) NOT NULL COMMENT 'description',
`owner` varchar(100) DEFAULT NULL COMMENT 'owner',
`context` TEXT DEFAULT NULL COMMENT 'context argument',

@ -0,0 +1,3 @@
-- Upgrade script v0.5.9 -> v0.5.10 for the ChatKnowledge financial-report
-- feature: adds the `domain_type` column to `knowledge_space`.
USE dbgpt;
ALTER TABLE knowledge_space
-- NOTE(review): the column is added NULLable here (safe for existing rows),
-- but assets/schema/dbgpt.sql declares `domain_type` NOT NULL for fresh
-- installs — confirm which nullability is intended, otherwise upgraded and
-- freshly-created databases will diverge. The COMMENT text also differs
-- ('space domain type' here vs 'domain type' in the base schema).
ADD COLUMN `domain_type` varchar(50) null comment 'space domain type' after `vector_type`;

@ -0,0 +1,396 @@
-- Full SQL of v0.5.9, please not modify this file(It must be same as the file in the release package)
CREATE
DATABASE IF NOT EXISTS dbgpt;
use dbgpt;
-- For alembic migration tool
CREATE TABLE IF NOT EXISTS `alembic_version`
(
version_num VARCHAR(32) NOT NULL,
CONSTRAINT alembic_version_pkc PRIMARY KEY (version_num)
) DEFAULT CHARSET=utf8mb4 ;
CREATE TABLE IF NOT EXISTS `knowledge_space`
(
`id` int NOT NULL AUTO_INCREMENT COMMENT 'auto increment id',
`name` varchar(100) NOT NULL COMMENT 'knowledge space name',
`vector_type` varchar(50) NOT NULL COMMENT 'vector type',
`desc` varchar(500) NOT NULL COMMENT 'description',
`owner` varchar(100) DEFAULT NULL COMMENT 'owner',
`context` TEXT DEFAULT NULL COMMENT 'context argument',
`gmt_created` TIMESTAMP DEFAULT CURRENT_TIMESTAMP COMMENT 'created time',
`gmt_modified` TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time',
PRIMARY KEY (`id`),
KEY `idx_name` (`name`) COMMENT 'index:idx_name'
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COMMENT='knowledge space table';
CREATE TABLE IF NOT EXISTS `knowledge_document`
(
`id` int NOT NULL AUTO_INCREMENT COMMENT 'auto increment id',
`doc_name` varchar(100) NOT NULL COMMENT 'document path name',
`doc_type` varchar(50) NOT NULL COMMENT 'doc type',
`space` varchar(50) NOT NULL COMMENT 'knowledge space',
`chunk_size` int NOT NULL COMMENT 'chunk size',
`last_sync` TIMESTAMP DEFAULT CURRENT_TIMESTAMP COMMENT 'last sync time',
`status` varchar(50) NOT NULL COMMENT 'status TODO,RUNNING,FAILED,FINISHED',
`content` LONGTEXT NOT NULL COMMENT 'knowledge embedding sync result',
`result` TEXT NULL COMMENT 'knowledge content',
`vector_ids` LONGTEXT NULL COMMENT 'vector_ids',
`summary` LONGTEXT NULL COMMENT 'knowledge summary',
`gmt_created` TIMESTAMP DEFAULT CURRENT_TIMESTAMP COMMENT 'created time',
`gmt_modified` TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time',
PRIMARY KEY (`id`),
KEY `idx_doc_name` (`doc_name`) COMMENT 'index:idx_doc_name'
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COMMENT='knowledge document table';
CREATE TABLE IF NOT EXISTS `document_chunk`
(
`id` int NOT NULL AUTO_INCREMENT COMMENT 'auto increment id',
`doc_name` varchar(100) NOT NULL COMMENT 'document path name',
`doc_type` varchar(50) NOT NULL COMMENT 'doc type',
`document_id` int NOT NULL COMMENT 'document parent id',
`content` longtext NOT NULL COMMENT 'chunk content',
`meta_info` varchar(200) NOT NULL COMMENT 'metadata info',
`gmt_created` timestamp NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'created time',
`gmt_modified` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time',
PRIMARY KEY (`id`),
KEY `idx_document_id` (`document_id`) COMMENT 'index:document_id'
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COMMENT='knowledge document chunk detail';
CREATE TABLE IF NOT EXISTS `connect_config`
(
`id` int NOT NULL AUTO_INCREMENT COMMENT 'autoincrement id',
`db_type` varchar(255) NOT NULL COMMENT 'db type',
`db_name` varchar(255) NOT NULL COMMENT 'db name',
`db_path` varchar(255) DEFAULT NULL COMMENT 'file db path',
`db_host` varchar(255) DEFAULT NULL COMMENT 'db connect host(not file db)',
`db_port` varchar(255) DEFAULT NULL COMMENT 'db cnnect port(not file db)',
`db_user` varchar(255) DEFAULT NULL COMMENT 'db user',
`db_pwd` varchar(255) DEFAULT NULL COMMENT 'db password',
`comment` text COMMENT 'db comment',
`sys_code` varchar(128) DEFAULT NULL COMMENT 'System code',
PRIMARY KEY (`id`),
UNIQUE KEY `uk_db` (`db_name`),
KEY `idx_q_db_type` (`db_type`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COMMENT 'Connection confi';
CREATE TABLE IF NOT EXISTS `chat_history`
(
`id` int NOT NULL AUTO_INCREMENT COMMENT 'autoincrement id',
`conv_uid` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL COMMENT 'Conversation record unique id',
`chat_mode` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL COMMENT 'Conversation scene mode',
`summary` longtext COLLATE utf8mb4_unicode_ci NOT NULL COMMENT 'Conversation record summary',
`user_name` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'interlocutor',
`messages` text COLLATE utf8mb4_unicode_ci COMMENT 'Conversation details',
`message_ids` text COLLATE utf8mb4_unicode_ci COMMENT 'Message id list, split by comma',
`sys_code` varchar(128) DEFAULT NULL COMMENT 'System code',
`gmt_created` timestamp NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'created time',
`gmt_modified` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time',
UNIQUE KEY `conv_uid` (`conv_uid`),
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci COMMENT 'Chat history';
CREATE TABLE IF NOT EXISTS `chat_history_message`
(
`id` int NOT NULL AUTO_INCREMENT COMMENT 'autoincrement id',
`conv_uid` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL COMMENT 'Conversation record unique id',
`index` int NOT NULL COMMENT 'Message index',
`round_index` int NOT NULL COMMENT 'Round of conversation',
`message_detail` text COLLATE utf8mb4_unicode_ci COMMENT 'Message details, json format',
`gmt_created` timestamp NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'created time',
`gmt_modified` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time',
UNIQUE KEY `message_uid_index` (`conv_uid`, `index`),
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci COMMENT 'Chat history message';
CREATE TABLE IF NOT EXISTS `chat_feed_back`
(
`id` bigint(20) NOT NULL AUTO_INCREMENT,
`conv_uid` varchar(128) DEFAULT NULL COMMENT 'Conversation ID',
`conv_index` int(4) DEFAULT NULL COMMENT 'Round of conversation',
`score` int(1) DEFAULT NULL COMMENT 'Score of user',
`ques_type` varchar(32) DEFAULT NULL COMMENT 'User question category',
`question` longtext DEFAULT NULL COMMENT 'User question',
`knowledge_space` varchar(128) DEFAULT NULL COMMENT 'Knowledge space name',
`messages` longtext DEFAULT NULL COMMENT 'The details of user feedback',
`user_name` varchar(128) DEFAULT NULL COMMENT 'User name',
`gmt_created` timestamp NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'created time',
`gmt_modified` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time',
PRIMARY KEY (`id`),
UNIQUE KEY `uk_conv` (`conv_uid`,`conv_index`),
KEY `idx_conv` (`conv_uid`,`conv_index`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COMMENT='User feedback table';
CREATE TABLE IF NOT EXISTS `my_plugin`
(
`id` int NOT NULL AUTO_INCREMENT COMMENT 'autoincrement id',
`tenant` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'user tenant',
`user_code` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL COMMENT 'user code',
`user_name` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'user name',
`name` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL COMMENT 'plugin name',
`file_name` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL COMMENT 'plugin package file name',
`type` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'plugin type',
`version` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'plugin version',
`use_count` int DEFAULT NULL COMMENT 'plugin total use count',
`succ_count` int DEFAULT NULL COMMENT 'plugin total success count',
`sys_code` varchar(128) DEFAULT NULL COMMENT 'System code',
`gmt_created` TIMESTAMP DEFAULT CURRENT_TIMESTAMP COMMENT 'plugin install time',
PRIMARY KEY (`id`),
UNIQUE KEY `name` (`name`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci COMMENT='User plugin table';
CREATE TABLE IF NOT EXISTS `plugin_hub`
(
`id` int NOT NULL AUTO_INCREMENT COMMENT 'autoincrement id',
`name` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL COMMENT 'plugin name',
`description` varchar(255) COLLATE utf8mb4_unicode_ci NOT NULL COMMENT 'plugin description',
`author` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'plugin author',
`email` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'plugin author email',
`type` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'plugin type',
`version` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'plugin version',
`storage_channel` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'plugin storage channel',
`storage_url` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'plugin download url',
`download_param` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'plugin download param',
`gmt_created` TIMESTAMP DEFAULT CURRENT_TIMESTAMP COMMENT 'plugin upload time',
`installed` int DEFAULT NULL COMMENT 'plugin already installed count',
PRIMARY KEY (`id`),
UNIQUE KEY `name` (`name`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci COMMENT='Plugin Hub table';
CREATE TABLE IF NOT EXISTS `prompt_manage`
(
`id` int(11) NOT NULL AUTO_INCREMENT,
`chat_scene` varchar(100) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'Chat scene',
`sub_chat_scene` varchar(100) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'Sub chat scene',
`prompt_type` varchar(100) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'Prompt type: common or private',
`prompt_name` varchar(256) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'prompt name',
`content` longtext COLLATE utf8mb4_unicode_ci COMMENT 'Prompt content',
`input_variables` varchar(1024) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'Prompt input variables(split by comma))',
`model` varchar(128) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'Prompt model name(we can use different models for different prompt)',
`prompt_language` varchar(32) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'Prompt language(eg:en, zh-cn)',
`prompt_format` varchar(32) COLLATE utf8mb4_unicode_ci DEFAULT 'f-string' COMMENT 'Prompt format(eg: f-string, jinja2)',
`prompt_desc` varchar(512) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'Prompt description',
`user_name` varchar(128) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'User name',
`sys_code` varchar(128) DEFAULT NULL COMMENT 'System code',
`gmt_created` timestamp NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'created time',
`gmt_modified` timestamp NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP COMMENT 'update time',
PRIMARY KEY (`id`),
UNIQUE KEY `prompt_name_uiq` (`prompt_name`, `sys_code`, `prompt_language`, `model`),
KEY `gmt_created_idx` (`gmt_created`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci COMMENT='Prompt management table';
CREATE TABLE IF NOT EXISTS `gpts_conversations` (
`id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'autoincrement id',
`conv_id` varchar(255) NOT NULL COMMENT 'The unique id of the conversation record',
`user_goal` text NOT NULL COMMENT 'User''s goals content',
`gpts_name` varchar(255) NOT NULL COMMENT 'The gpts name',
`state` varchar(255) DEFAULT NULL COMMENT 'The gpts state',
`max_auto_reply_round` int(11) NOT NULL COMMENT 'max auto reply round',
`auto_reply_count` int(11) NOT NULL COMMENT 'auto reply count',
`user_code` varchar(255) DEFAULT NULL COMMENT 'user code',
`sys_code` varchar(255) DEFAULT NULL COMMENT 'system app ',
`created_at` datetime DEFAULT NULL COMMENT 'create time',
`updated_at` datetime DEFAULT NULL COMMENT 'last update time',
`team_mode` varchar(255) NULL COMMENT 'agent team work mode',
PRIMARY KEY (`id`),
UNIQUE KEY `uk_gpts_conversations` (`conv_id`),
KEY `idx_gpts_name` (`gpts_name`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COMMENT="gpt conversations";
CREATE TABLE IF NOT EXISTS `gpts_instance` (
`id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'autoincrement id',
`gpts_name` varchar(255) NOT NULL COMMENT 'Current AI assistant name',
`gpts_describe` varchar(2255) NOT NULL COMMENT 'Current AI assistant describe',
`resource_db` text COMMENT 'List of structured database names contained in the current gpts',
`resource_internet` text COMMENT 'Is it possible to retrieve information from the internet',
`resource_knowledge` text COMMENT 'List of unstructured database names contained in the current gpts',
`gpts_agents` varchar(1000) DEFAULT NULL COMMENT 'List of agents names contained in the current gpts',
`gpts_models` varchar(1000) DEFAULT NULL COMMENT 'List of llm model names contained in the current gpts',
`language` varchar(100) DEFAULT NULL COMMENT 'gpts language',
`user_code` varchar(255) NOT NULL COMMENT 'user code',
`sys_code` varchar(255) DEFAULT NULL COMMENT 'system app code',
`created_at` datetime DEFAULT NULL COMMENT 'create time',
`updated_at` datetime DEFAULT NULL COMMENT 'last update time',
`team_mode` varchar(255) NOT NULL COMMENT 'Team work mode',
`is_sustainable` tinyint(1) NOT NULL COMMENT 'Applications for sustainable dialogue',
PRIMARY KEY (`id`),
UNIQUE KEY `uk_gpts` (`gpts_name`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COMMENT="gpts instance";
CREATE TABLE `gpts_messages` (
`id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'autoincrement id',
`conv_id` varchar(255) NOT NULL COMMENT 'The unique id of the conversation record',
`sender` varchar(255) NOT NULL COMMENT 'Who speaking in the current conversation turn',
`receiver` varchar(255) NOT NULL COMMENT 'Who receive message in the current conversation turn',
`model_name` varchar(255) DEFAULT NULL COMMENT 'message generate model',
`rounds` int(11) NOT NULL COMMENT 'dialogue turns',
`content` text COMMENT 'Content of the speech',
`current_goal` text COMMENT 'The target corresponding to the current message',
`context` text COMMENT 'Current conversation context',
`review_info` text COMMENT 'Current conversation review info',
`action_report` text COMMENT 'Current conversation action report',
`role` varchar(255) DEFAULT NULL COMMENT 'The role of the current message content',
`created_at` datetime DEFAULT NULL COMMENT 'create time',
`updated_at` datetime DEFAULT NULL COMMENT 'last update time',
PRIMARY KEY (`id`),
KEY `idx_q_messages` (`conv_id`,`rounds`,`sender`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COMMENT="gpts message";
CREATE TABLE `gpts_plans` (
`id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'autoincrement id',
`conv_id` varchar(255) NOT NULL COMMENT 'The unique id of the conversation record',
`sub_task_num` int(11) NOT NULL COMMENT 'Subtask number',
`sub_task_title` varchar(255) NOT NULL COMMENT 'subtask title',
`sub_task_content` text NOT NULL COMMENT 'subtask content',
`sub_task_agent` varchar(255) DEFAULT NULL COMMENT 'Available agents corresponding to subtasks',
`resource_name` varchar(255) DEFAULT NULL COMMENT 'resource name',
`rely` varchar(255) DEFAULT NULL COMMENT 'Subtask dependencies,like: 1,2,3',
`agent_model` varchar(255) DEFAULT NULL COMMENT 'LLM model used by subtask processing agents',
`retry_times` int(11) DEFAULT NULL COMMENT 'number of retries',
`max_retry_times` int(11) DEFAULT NULL COMMENT 'Maximum number of retries',
`state` varchar(255) DEFAULT NULL COMMENT 'subtask status',
`result` longtext COMMENT 'subtask result',
`created_at` datetime DEFAULT NULL COMMENT 'create time',
`updated_at` datetime DEFAULT NULL COMMENT 'last update time',
PRIMARY KEY (`id`),
UNIQUE KEY `uk_sub_task` (`conv_id`,`sub_task_num`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COMMENT="gpt plan";
-- dbgpt.dbgpt_serve_flow definition
CREATE TABLE `dbgpt_serve_flow` (
`id` int NOT NULL AUTO_INCREMENT COMMENT 'Auto increment id',
`uid` varchar(128) NOT NULL COMMENT 'Unique id',
`dag_id` varchar(128) DEFAULT NULL COMMENT 'DAG id',
`name` varchar(128) DEFAULT NULL COMMENT 'Flow name',
`flow_data` text COMMENT 'Flow data, JSON format',
`user_name` varchar(128) DEFAULT NULL COMMENT 'User name',
`sys_code` varchar(128) DEFAULT NULL COMMENT 'System code',
`gmt_created` datetime DEFAULT NULL COMMENT 'Record creation time',
`gmt_modified` datetime DEFAULT NULL COMMENT 'Record update time',
`flow_category` varchar(64) DEFAULT NULL COMMENT 'Flow category',
`description` varchar(512) DEFAULT NULL COMMENT 'Flow description',
`state` varchar(32) DEFAULT NULL COMMENT 'Flow state',
`error_message` varchar(512) NULL comment 'Error message',
`source` varchar(64) DEFAULT NULL COMMENT 'Flow source',
`source_url` varchar(512) DEFAULT NULL COMMENT 'Flow source url',
`version` varchar(32) DEFAULT NULL COMMENT 'Flow version',
`define_type` varchar(32) null comment 'Flow define type(json or python)',
`label` varchar(128) DEFAULT NULL COMMENT 'Flow label',
`editable` int DEFAULT NULL COMMENT 'Editable, 0: editable, 1: not editable',
PRIMARY KEY (`id`),
UNIQUE KEY `uk_uid` (`uid`),
KEY `ix_dbgpt_serve_flow_sys_code` (`sys_code`),
KEY `ix_dbgpt_serve_flow_uid` (`uid`),
KEY `ix_dbgpt_serve_flow_dag_id` (`dag_id`),
KEY `ix_dbgpt_serve_flow_user_name` (`user_name`),
KEY `ix_dbgpt_serve_flow_name` (`name`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
-- dbgpt.gpts_app definition
CREATE TABLE `gpts_app` (
`id` int NOT NULL AUTO_INCREMENT COMMENT 'autoincrement id',
`app_code` varchar(255) NOT NULL COMMENT 'Current AI assistant code',
`app_name` varchar(255) NOT NULL COMMENT 'Current AI assistant name',
`app_describe` varchar(2255) NOT NULL COMMENT 'Current AI assistant describe',
`language` varchar(100) NOT NULL COMMENT 'gpts language',
`team_mode` varchar(255) NOT NULL COMMENT 'Team work mode',
`team_context` text COMMENT 'The execution logic and team member content that teams with different working modes rely on',
`user_code` varchar(255) DEFAULT NULL COMMENT 'user code',
`sys_code` varchar(255) DEFAULT NULL COMMENT 'system app code',
`created_at` datetime DEFAULT NULL COMMENT 'create time',
`updated_at` datetime DEFAULT NULL COMMENT 'last update time',
`icon` varchar(1024) DEFAULT NULL COMMENT 'app icon, url',
PRIMARY KEY (`id`),
UNIQUE KEY `uk_gpts_app` (`app_name`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
CREATE TABLE `gpts_app_collection` (
`id` int(11) NOT NULL AUTO_INCREMENT COMMENT 'autoincrement id',
`app_code` varchar(255) NOT NULL COMMENT 'Current AI assistant code',
`user_code` int(11) NOT NULL COMMENT 'user code',
`sys_code` varchar(255) NOT NULL COMMENT 'system app code',
`created_at` datetime DEFAULT NULL COMMENT 'create time',
`updated_at` datetime DEFAULT NULL COMMENT 'last update time',
PRIMARY KEY (`id`),
KEY `idx_app_code` (`app_code`),
KEY `idx_user_code` (`user_code`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COMMENT="gpt collections";
-- dbgpt.gpts_app_detail definition
CREATE TABLE `gpts_app_detail` (
`id` int NOT NULL AUTO_INCREMENT COMMENT 'autoincrement id',
`app_code` varchar(255) NOT NULL COMMENT 'Current AI assistant code',
`app_name` varchar(255) NOT NULL COMMENT 'Current AI assistant name',
`agent_name` varchar(255) NOT NULL COMMENT ' Agent name',
`node_id` varchar(255) NOT NULL COMMENT 'Current AI assistant Agent Node id',
`resources` text COMMENT 'Agent bind resource',
`prompt_template` text COMMENT 'Agent bind template',
`llm_strategy` varchar(25) DEFAULT NULL COMMENT 'Agent use llm strategy',
`llm_strategy_value` text COMMENT 'Agent use llm strategy value',
`created_at` datetime DEFAULT NULL COMMENT 'create time',
`updated_at` datetime DEFAULT NULL COMMENT 'last update time',
PRIMARY KEY (`id`),
UNIQUE KEY `uk_gpts_app_agent_node` (`app_name`,`agent_name`,`node_id`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
CREATE
DATABASE IF NOT EXISTS EXAMPLE_1;
use EXAMPLE_1;
CREATE TABLE IF NOT EXISTS `users`
(
`id` int NOT NULL AUTO_INCREMENT,
`username` varchar(50) NOT NULL COMMENT '用户名',
`password` varchar(50) NOT NULL COMMENT '密码',
`email` varchar(50) NOT NULL COMMENT '邮箱',
`phone` varchar(20) DEFAULT NULL COMMENT '电话',
PRIMARY KEY (`id`),
KEY `idx_username` (`username`) COMMENT '索引:按用户名查询'
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COMMENT='聊天用户表';
INSERT INTO users (username, password, email, phone)
VALUES ('user_1', 'password_1', 'user_1@example.com', '12345678901');
INSERT INTO users (username, password, email, phone)
VALUES ('user_2', 'password_2', 'user_2@example.com', '12345678902');
INSERT INTO users (username, password, email, phone)
VALUES ('user_3', 'password_3', 'user_3@example.com', '12345678903');
INSERT INTO users (username, password, email, phone)
VALUES ('user_4', 'password_4', 'user_4@example.com', '12345678904');
INSERT INTO users (username, password, email, phone)
VALUES ('user_5', 'password_5', 'user_5@example.com', '12345678905');
INSERT INTO users (username, password, email, phone)
VALUES ('user_6', 'password_6', 'user_6@example.com', '12345678906');
INSERT INTO users (username, password, email, phone)
VALUES ('user_7', 'password_7', 'user_7@example.com', '12345678907');
INSERT INTO users (username, password, email, phone)
VALUES ('user_8', 'password_8', 'user_8@example.com', '12345678908');
INSERT INTO users (username, password, email, phone)
VALUES ('user_9', 'password_9', 'user_9@example.com', '12345678909');
INSERT INTO users (username, password, email, phone)
VALUES ('user_10', 'password_10', 'user_10@example.com', '12345678900');
INSERT INTO users (username, password, email, phone)
VALUES ('user_11', 'password_11', 'user_11@example.com', '12345678901');
INSERT INTO users (username, password, email, phone)
VALUES ('user_12', 'password_12', 'user_12@example.com', '12345678902');
INSERT INTO users (username, password, email, phone)
VALUES ('user_13', 'password_13', 'user_13@example.com', '12345678903');
INSERT INTO users (username, password, email, phone)
VALUES ('user_14', 'password_14', 'user_14@example.com', '12345678904');
INSERT INTO users (username, password, email, phone)
VALUES ('user_15', 'password_15', 'user_15@example.com', '12345678905');
INSERT INTO users (username, password, email, phone)
VALUES ('user_16', 'password_16', 'user_16@example.com', '12345678906');
INSERT INTO users (username, password, email, phone)
VALUES ('user_17', 'password_17', 'user_17@example.com', '12345678907');
INSERT INTO users (username, password, email, phone)
VALUES ('user_18', 'password_18', 'user_18@example.com', '12345678908');
INSERT INTO users (username, password, email, phone)
VALUES ('user_19', 'password_19', 'user_19@example.com', '12345678909');
INSERT INTO users (username, password, email, phone)
VALUES ('user_20', 'password_20', 'user_20@example.com', '12345678900');

Binary file not shown.

Before

Width:  |  Height:  |  Size: 118 KiB

After

Width:  |  Height:  |  Size: 118 KiB

@ -166,10 +166,10 @@ class Config(metaclass=Singleton):
self.execute_local_commands = (
os.getenv("EXECUTE_LOCAL_COMMANDS", "False").lower() == "true"
)
### message stor file
# message store file
self.message_dir = os.getenv("MESSAGE_HISTORY_DIR", "../../message")
### Native SQL Execution Capability Control Configuration
# Native SQL Execution Capability Control Configuration
self.NATIVE_SQL_CAN_RUN_DDL = (
os.getenv("NATIVE_SQL_CAN_RUN_DDL", "True").lower() == "true"
)
@ -177,7 +177,7 @@ class Config(metaclass=Singleton):
os.getenv("NATIVE_SQL_CAN_RUN_WRITE", "True").lower() == "true"
)
### dbgpt meta info database connection configuration
# dbgpt meta info database connection configuration
self.LOCAL_DB_HOST = os.getenv("LOCAL_DB_HOST")
self.LOCAL_DB_PATH = os.getenv("LOCAL_DB_PATH", "data/default_sqlite.db")
self.LOCAL_DB_TYPE = os.getenv("LOCAL_DB_TYPE", "sqlite")
@ -193,13 +193,13 @@ class Config(metaclass=Singleton):
self.CHAT_HISTORY_STORE_TYPE = os.getenv("CHAT_HISTORY_STORE_TYPE", "db")
### LLM Model Service Configuration
# LLM Model Service Configuration
self.LLM_MODEL = os.getenv("LLM_MODEL", "glm-4-9b-chat")
self.LLM_MODEL_PATH = os.getenv("LLM_MODEL_PATH")
### Proxy llm backend, this configuration is only valid when "LLM_MODEL=proxyllm"
### When we use the rest API provided by deployment frameworks like fastchat as a proxyllm, "PROXYLLM_BACKEND" is the model they actually deploy.
### We need to use "PROXYLLM_BACKEND" to load the prompt of the corresponding scene.
# Proxy llm backend, this configuration is only valid when "LLM_MODEL=proxyllm"
# When we use the rest API provided by deployment frameworks like fastchat as a proxyllm, "PROXYLLM_BACKEND" is the model they actually deploy.
# We need to use "PROXYLLM_BACKEND" to load the prompt of the corresponding scene.
self.PROXYLLM_BACKEND = None
if self.LLM_MODEL == "proxyllm":
self.PROXYLLM_BACKEND = os.getenv("PROXYLLM_BACKEND")
@ -211,7 +211,7 @@ class Config(metaclass=Singleton):
"MODEL_SERVER", "http://127.0.0.1" + ":" + str(self.MODEL_PORT)
)
### Vector Store Configuration
# Vector Store Configuration
self.VECTOR_STORE_TYPE = os.getenv("VECTOR_STORE_TYPE", "Chroma")
self.MILVUS_URL = os.getenv("MILVUS_URL", "127.0.0.1")
self.MILVUS_PORT = os.getenv("MILVUS_PORT", "19530")
@ -223,7 +223,7 @@ class Config(metaclass=Singleton):
self.ELASTICSEARCH_USERNAME = os.getenv("ELASTICSEARCH_USERNAME", None)
self.ELASTICSEARCH_PASSWORD = os.getenv("ELASTICSEARCH_PASSWORD", None)
## OceanBase Configuration
# OceanBase Configuration
self.OB_HOST = os.getenv("OB_HOST", "127.0.0.1")
self.OB_PORT = int(os.getenv("OB_PORT", "2881"))
self.OB_USER = os.getenv("OB_USER", "root")
@ -245,7 +245,7 @@ class Config(metaclass=Singleton):
os.environ["load_8bit"] = str(self.IS_LOAD_8BIT)
os.environ["load_4bit"] = str(self.IS_LOAD_4BIT)
### EMBEDDING Configuration
# EMBEDDING Configuration
self.EMBEDDING_MODEL = os.getenv("EMBEDDING_MODEL", "text2vec")
# Rerank model configuration
self.RERANK_MODEL = os.getenv("RERANK_MODEL")
@ -276,17 +276,17 @@ class Config(metaclass=Singleton):
os.getenv("KNOWLEDGE_CHAT_SHOW_RELATIONS", "False").lower() == "true"
)
### SUMMARY_CONFIG Configuration
# SUMMARY_CONFIG Configuration
self.SUMMARY_CONFIG = os.getenv("SUMMARY_CONFIG", "FAST")
self.MAX_GPU_MEMORY = os.getenv("MAX_GPU_MEMORY", None)
### Log level
# Log level
self.DBGPT_LOG_LEVEL = os.getenv("DBGPT_LOG_LEVEL", "INFO")
self.SYSTEM_APP: Optional["SystemApp"] = None
### Temporary configuration
# Temporary configuration
self.USE_FASTCHAT: bool = os.getenv("USE_FASTCHAT", "True").lower() == "true"
self.MODEL_CACHE_ENABLE: bool = (
@ -312,6 +312,8 @@ class Config(metaclass=Singleton):
self.DBGPT_APP_SCENE_NON_STREAMING_PARALLELISM_BASE = int(
os.getenv("DBGPT_APP_SCENE_NON_STREAMING_PARALLELISM_BASE", 1)
)
# experimental financial report model configuration
self.FIN_REPORT_MODEL = os.getenv("FIN_REPORT_MODEL", None)
@property
def local_db_manager(self) -> "ConnectorManager":

@ -1 +1 @@
version = "0.5.9"
version = "0.5.10"

@ -23,19 +23,27 @@ from dbgpt.app.knowledge.request.response import KnowledgeQueryResponse
from dbgpt.app.knowledge.service import KnowledgeService
from dbgpt.app.openapi.api_v1.api_v1 import no_stream_generator, stream_generator
from dbgpt.app.openapi.api_view_model import Result
from dbgpt.configs import TAG_KEY_KNOWLEDGE_FACTORY_DOMAIN_TYPE
from dbgpt.configs.model_config import (
EMBEDDING_MODEL_CONFIG,
KNOWLEDGE_UPLOAD_ROOT_PATH,
)
from dbgpt.core.awel.dag.dag_manager import DAGManager
from dbgpt.rag import ChunkParameters
from dbgpt.rag.embedding.embedding_factory import EmbeddingFactory
from dbgpt.rag.knowledge.base import ChunkStrategy
from dbgpt.rag.knowledge.factory import KnowledgeFactory
from dbgpt.rag.retriever.embedding import EmbeddingRetriever
from dbgpt.serve.rag.api.schemas import KnowledgeSyncRequest
from dbgpt.serve.rag.api.schemas import (
KnowledgeConfigResponse,
KnowledgeDomainType,
KnowledgeStorageType,
KnowledgeSyncRequest,
)
from dbgpt.serve.rag.connector import VectorStoreConnector
from dbgpt.serve.rag.service.service import Service
from dbgpt.storage.vector_store.base import VectorStoreConfig
from dbgpt.util.i18n_utils import _
from dbgpt.util.tracer import SpanType, root_tracer
logger = logging.getLogger(__name__)
@ -52,6 +60,11 @@ def get_rag_service() -> Service:
return Service.get_instance(CFG.SYSTEM_APP)
def get_dag_manager() -> DAGManager:
    """Return the application-wide DAG Manager singleton."""
    dag_manager: DAGManager = DAGManager.get_instance(CFG.SYSTEM_APP)
    return dag_manager
@router.post("/knowledge/space/add")
def space_add(request: KnowledgeSpaceRequest):
print(f"/space/add params: {request}")
@ -147,6 +160,55 @@ def chunk_strategies():
return Result.failed(code="E000X", msg=f"chunk strategies error {e}")
@router.get("/knowledge/space/config", response_model=Result[KnowledgeConfigResponse])
async def space_config() -> Result[KnowledgeConfigResponse]:
    """Return the knowledge-space storage configuration.

    Reports the available storage backends (vector store, knowledge graph,
    full text) and, for the vector store, the domain types contributed by
    DAGs tagged with the knowledge-factory domain-type key.
    """
    try:
        dag_manager: DAGManager = get_dag_manager()
        # Vector storage always supports the "Normal" domain; extra domain
        # types come from registered DAGs (e.g. financial report flows).
        vector_domains = [KnowledgeDomainType(name="Normal", desc="Normal")]
        tagged_dags = dag_manager.get_dags_by_tag_key(
            TAG_KEY_KNOWLEDGE_FACTORY_DOMAIN_TYPE
        )
        for domain_name, domain_dags in tagged_dags.items():
            first_dag = domain_dags[0]
            vector_domains.append(
                KnowledgeDomainType(
                    name=domain_name, desc=first_dag.description or domain_name
                )
            )
        storage_list: List[KnowledgeStorageType] = [
            KnowledgeStorageType(
                name="VectorStore",
                desc=_("Vector Store"),
                domain_types=vector_domains,
            ),
            # Graph storage: "Normal" domain only.
            KnowledgeStorageType(
                name="KnowledgeGraph",
                desc=_("Knowledge Graph"),
                domain_types=[KnowledgeDomainType(name="Normal", desc="Normal")],
            ),
            # Full-text storage: "Normal" domain only.
            KnowledgeStorageType(
                name="FullText",
                desc=_("Full Text"),
                domain_types=[KnowledgeDomainType(name="Normal", desc="Normal")],
            ),
        ]
        return Result.succ(KnowledgeConfigResponse(storage=storage_list))
    except Exception as e:
        return Result.failed(code="E000X", msg=f"space config error {e}")
@router.post("/knowledge/{space_name}/document/list")
def document_list(space_name: str, query_request: DocumentQueryRequest):
print(f"/document/list params: {space_name}, {query_request}")
@ -350,27 +412,3 @@ async def document_summary(request: DocumentSummaryRequest):
)
except Exception as e:
return Result.failed(code="E000X", msg=f"document summary error {e}")
@router.post("/knowledge/entity/extract")
async def entity_extract(request: EntityExtractRequest):
    """Extract entities from the request text via the LLM extraction scene."""
    logger.info(f"Received params: {request}")
    try:
        import uuid

        from dbgpt.app.scene import ChatScene
        from dbgpt.util.chat_util import llm_chat_response_nostream

        # Build a one-off chat session for the entity-extraction scene.
        chat_request = {
            "chat_session_id": uuid.uuid1(),
            "current_user_input": request.text,
            "select_param": "entity",
            "model_name": request.model_name,
        }
        extraction = await llm_chat_response_nostream(
            ChatScene.ExtractEntity.value(), chat_param=chat_request
        )
        return Result.succ(extraction)
    except Exception as e:
        return Result.failed(code="E000X", msg=f"entity extract error {e}")

@ -1,3 +1,4 @@
from enum import Enum
from typing import List, Optional
from dbgpt._private.pydantic import BaseModel, ConfigDict
@ -19,12 +20,20 @@ class KnowledgeSpaceRequest(BaseModel):
name: str = None
"""vector_type: vector type"""
vector_type: str = None
"""vector_type: vector type"""
domain_type: str = "normal"
"""desc: description"""
desc: str = None
"""owner: owner"""
owner: str = None
class BusinessFieldType(Enum):
    """Business domain types that a knowledge space can belong to."""

    # Default: a general-purpose knowledge space with no domain-specific flow.
    NORMAL = "Normal"
class KnowledgeDocumentRequest(BaseModel):
"""doc_name: doc path"""

@ -33,6 +33,8 @@ class SpaceQueryResponse(BaseModel):
name: str = None
"""vector_type: vector type"""
vector_type: str = None
"""field_type: field type"""
domain_type: str = None
"""desc: description"""
desc: str = None
"""context: context"""

@ -23,6 +23,7 @@ from dbgpt.app.knowledge.request.response import (
SpaceQueryResponse,
)
from dbgpt.component import ComponentType
from dbgpt.configs import DOMAIN_TYPE_FINANCIAL_REPORT
from dbgpt.configs.model_config import EMBEDDING_MODEL_CONFIG
from dbgpt.core import LLMClient
from dbgpt.model import DefaultLLMClient
@ -133,6 +134,7 @@ class KnowledgeService:
res.id = space.id
res.name = space.name
res.vector_type = space.vector_type
res.domain_type = space.domain_type
res.desc = space.desc
res.owner = space.owner
res.gmt_created = space.gmt_created
@ -299,6 +301,10 @@ class KnowledgeService:
llm_client=self.llm_client,
model_name=None,
)
if space.domain_type == DOMAIN_TYPE_FINANCIAL_REPORT:
conn_manager = CFG.local_db_manager
conn_manager.delete_db(f"{space.name}_fin_report")
vector_store_connector = VectorStoreConnector(
vector_store_type=space.vector_type, vector_store_config=config
)

@ -3,7 +3,7 @@ import logging
import os
import uuid
from concurrent.futures import Executor
from typing import List, Optional
from typing import List, Optional, cast
import aiofiles
from fastapi import APIRouter, Body, Depends, File, UploadFile
@ -21,8 +21,11 @@ from dbgpt.app.openapi.api_view_model import (
)
from dbgpt.app.scene import BaseChat, ChatFactory, ChatScene
from dbgpt.component import ComponentType
from dbgpt.configs import TAG_KEY_KNOWLEDGE_CHAT_DOMAIN_TYPE
from dbgpt.configs.model_config import KNOWLEDGE_UPLOAD_ROOT_PATH
from dbgpt.core.awel import CommonLLMHttpRequestBody
from dbgpt.core.awel import BaseOperator, CommonLLMHttpRequestBody
from dbgpt.core.awel.dag.dag_manager import DAGManager
from dbgpt.core.awel.util.chat_util import safe_chat_stream_with_dag_task
from dbgpt.core.schema.api import (
ChatCompletionResponseStreamChoice,
ChatCompletionStreamResponse,
@ -127,6 +130,11 @@ def get_worker_manager() -> WorkerManager:
return worker_manager
def get_dag_manager() -> DAGManager:
    """Fetch the global default DAGManager from the system app."""
    manager = DAGManager.get_instance(CFG.SYSTEM_APP)
    return manager
def get_chat_flow() -> FlowService:
    """Resolve the Chat Flow Service from the system app."""
    flow_service = FlowService.get_instance(CFG.SYSTEM_APP)
    return flow_service
@ -252,7 +260,7 @@ async def params_load(
sys_code: Optional[str] = None,
doc_file: UploadFile = File(...),
):
print(f"params_load: {conv_uid},{chat_mode},{model_name}")
logger.info(f"params_load: {conv_uid},{chat_mode},{model_name}")
try:
if doc_file:
# Save the uploaded file
@ -335,7 +343,7 @@ async def chat_completions(
dialogue: ConversationVo = Body(),
flow_service: FlowService = Depends(get_chat_flow),
):
print(
logger.info(
f"chat_completions:{dialogue.chat_mode},{dialogue.select_param},{dialogue.model_name}"
)
headers = {
@ -344,6 +352,7 @@ async def chat_completions(
"Connection": "keep-alive",
"Transfer-Encoding": "chunked",
}
domain_type = _parse_domain_type(dialogue)
if dialogue.chat_mode == ChatScene.ChatAgent.value():
return StreamingResponse(
multi_agents.app_agent_chat(
@ -378,12 +387,20 @@ async def chat_completions(
headers=headers,
media_type="text/event-stream",
)
elif domain_type is not None:
return StreamingResponse(
chat_with_domain_flow(dialogue, domain_type),
headers=headers,
media_type="text/event-stream",
)
else:
with root_tracer.start_span(
"get_chat_instance",
span_type=SpanType.CHAT,
metadata=model_to_dict(dialogue),
):
chat: BaseChat = await get_chat_instance(dialogue)
if not chat.prompt_template.stream_out:
@ -484,3 +501,61 @@ def message2Vo(message: dict, order, model_name) -> MessageVo:
order=order,
model_name=model_name,
)
def _parse_domain_type(dialogue: ConversationVo) -> Optional[str]:
    """Resolve the domain type of the knowledge space selected in *dialogue*.

    Returns the space's domain type (e.g. a financial-report domain) when the
    dialogue is a knowledge chat and the space declares one, otherwise None.

    Raises:
        ValueError: if the selected knowledge space does not exist.
    """
    if dialogue.chat_mode != ChatScene.ChatKnowledge.value():
        return None
    # Only knowledge chat carries a space name in select_param.
    space_name = dialogue.select_param
    spaces = knowledge_service.get_knowledge_space(
        KnowledgeSpaceRequest(name=space_name)
    )
    if len(spaces) == 0:
        # Bug fix: previously returned Result.failed(...), which violates the
        # Optional[str] return contract — the caller checks `is not None` and
        # would have treated the Result object as a valid domain type.
        raise ValueError(f"Knowledge space {space_name} not found")
    if spaces[0].domain_type:
        return spaces[0].domain_type
    return None
async def chat_with_domain_flow(dialogue: ConversationVo, domain_type: str):
    """Stream a chat answer through the AWEL flow registered for *domain_type*.

    Looks up the DAG tagged with the knowledge-chat domain-type key, binds the
    knowledge space and its backing database name into the flow context, and
    yields SSE-formatted chunks from the flow's end task.

    Raises:
        ValueError: if no DAG is registered for the domain type, or no
            database matching the space name can be found.
    """
    dag_manager = get_dag_manager()
    dags = dag_manager.get_dags_by_tag(TAG_KEY_KNOWLEDGE_CHAT_DOMAIN_TYPE, domain_type)
    if not dags or not dags[0].leaf_nodes:
        raise ValueError(f"Cant find the DAG for domain type {domain_type}")
    end_task = cast(BaseOperator, dags[0].leaf_nodes[0])
    space = dialogue.select_param
    connector_manager = CFG.local_db_manager
    # TODO: Some flow maybe not connector
    db_list = [item["db_name"] for item in connector_manager.get_db_list()]
    db_names = [item for item in db_list if space in item]
    if len(db_names) == 0:
        # Bug fix: message previously said "fin repost"; corrected to
        # "fin report" (matches the f"{space}_fin_report" naming convention).
        raise ValueError(f"fin report dbname {space}_fin_report not found.")
    flow_ctx = {"space": space, "db_name": db_names[0]}
    request = CommonLLMHttpRequestBody(
        model=dialogue.model_name,
        messages=dialogue.user_input,
        stream=True,
        extra=flow_ctx,
        conv_uid=dialogue.conv_uid,
        span_id=root_tracer.get_current_span_id(),
        chat_mode=dialogue.chat_mode,
        chat_param=dialogue.select_param,
        user_name=dialogue.user_name,
        sys_code=dialogue.sys_code,
        incremental=dialogue.incremental,
    )
    async for output in safe_chat_stream_with_dag_task(end_task, request, False):
        text = output.text
        if text:
            # SSE frames are newline-delimited; escape embedded newlines.
            text = text.replace("\n", "\\n")
        if output.error_code != 0:
            yield f"data:[SERVER_ERROR]{text}\n\n"
            break
        else:
            yield f"data:{text}\n\n"

@ -93,13 +93,6 @@ class ChatScene(Enum):
"Dialogue through natural language and private documents and knowledge bases.",
["Knowledge Space Select"],
)
ExtractTriplet = Scene(
"extract_triplet",
"Extract Triplet",
"Extract Triplet",
["Extract Select"],
True,
)
ExtractSummary = Scene(
"extract_summary",
"Extract Summary",
@ -114,9 +107,6 @@ class ChatScene(Enum):
["Extract Select"],
True,
)
ExtractEntity = Scene(
"extract_entity", "Extract Entity", "Extract Entity", ["Extract Select"], True
)
QueryRewrite = Scene(
"query_rewrite", "query_rewrite", "query_rewrite", ["query_rewrite"], True
)

@ -1,5 +1,4 @@
from dbgpt.app.scene.base_chat import BaseChat
from dbgpt.core import PromptTemplate
from dbgpt.util.singleton import Singleton
from dbgpt.util.tracer import root_tracer
@ -17,10 +16,6 @@ class ChatFactory(metaclass=Singleton):
from dbgpt.app.scene.chat_db.auto_execute.prompt import prompt
from dbgpt.app.scene.chat_db.professional_qa.chat import ChatWithDbQA
from dbgpt.app.scene.chat_db.professional_qa.prompt import prompt
from dbgpt.app.scene.chat_knowledge.extract_entity.chat import ExtractEntity
from dbgpt.app.scene.chat_knowledge.extract_entity.prompt import prompt
from dbgpt.app.scene.chat_knowledge.extract_triplet.chat import ExtractTriplet
from dbgpt.app.scene.chat_knowledge.extract_triplet.prompt import prompt
from dbgpt.app.scene.chat_knowledge.refine_summary.chat import (
ExtractRefineSummary,
)

@ -1,29 +0,0 @@
from typing import Dict
from dbgpt.app.scene import BaseChat, ChatScene
class ExtractEntity(BaseChat):
    """Extract entities (keywords) from user input via the LLM."""

    chat_scene: str = ChatScene.ExtractEntity.value()

    def __init__(self, chat_param: Dict):
        """Initialize from chat_param; requires ``current_user_input`` and
        ``select_param`` keys."""
        chat_param["chat_mode"] = ChatScene.ExtractEntity
        super().__init__(
            chat_param=chat_param,
        )
        self.user_input = chat_param["current_user_input"]
        self.extract_mode = chat_param["select_param"]

    async def generate_input_values(self):
        """Provide the template variables for the extraction prompt."""
        input_values = {
            "text": self.user_input,
        }
        return input_values

    @property
    def chat_type(self) -> str:
        # Bug fix: previously returned ChatScene.ExtractEntity.value (the
        # bound method object, missing call parens) instead of the scene
        # value string, inconsistent with `chat_scene` above.
        return ChatScene.ExtractEntity.value()

@ -1,34 +0,0 @@
import logging
from typing import Set
from dbgpt.core.interface.output_parser import BaseOutputParser
logger = logging.getLogger(__name__)
class ExtractEntityParser(BaseOutputParser):
    """Parse the LLM keyword-extraction response into a set of keywords."""

    def __init__(self, is_stream_out: bool, **kwargs):
        super().__init__(is_stream_out=is_stream_out, **kwargs)

    def parse_prompt_response(self, response, max_length: int = 128) -> Set[str]:
        """Parse ``KEYWORDS: a,b,c`` style output into lowercased keywords.

        Args:
            response: Raw LLM text, optionally prefixed with ``KEYWORDS:``.
            max_length: Unused; kept for signature compatibility.

        Returns:
            Deduplicated, lowercased, whitespace-stripped keywords.
        """
        # Bug fix: use the module logger (defined above) instead of print.
        logger.info("clean prompt response: %s", response)
        response = response.strip()  # Strip newlines from responses.
        if response.startswith("KEYWORDS:"):
            response = response[len("KEYWORDS:") :]
        return {keyword.lower().strip() for keyword in response.split(",")}

    def parse_view_response(self, speak, data) -> str:
        # Pass extracted data through unchanged for the view layer.
        return data

@ -1,41 +0,0 @@
from dbgpt._private.config import Config
from dbgpt.app.scene import AppScenePromptTemplateAdapter, ChatScene
from dbgpt.app.scene.chat_knowledge.extract_entity.out_parser import ExtractEntityParser
from dbgpt.core import ChatPromptTemplate, HumanPromptTemplate
CFG = Config()

# No scene-level system prompt is used for entity extraction.
PROMPT_SCENE_DEFINE = """"""

# Human prompt: ask the model for up to 10 comma-separated keywords,
# prefixed with "KEYWORDS:", as consumed by ExtractEntityParser.
_DEFAULT_TEMPLATE = """
"A question is provided below. Given the question, extract up to 10 "
"keywords from the text. Focus on extracting the keywords that we can use "
"to best lookup answers to the question. Avoid stopwords.\n"
"Example:"
"Text: Alice is Bob's mother."
"KEYWORDS:Alice,mother,Bob\n"
"---------------------\n"
"{text}\n"
"---------------------\n"
"Provide keywords in the following comma-separated format: 'KEYWORDS: <keywords>'\n"
"""

# No extra response-format suffix beyond the template above.
PROMPT_RESPONSE = """"""

# Entity extraction returns the full parsed result at once (no streaming).
# NOTE(review): "NEED_NEED" looks like a typo for "NEED"; name kept as-is
# because it is referenced below.
PROMPT_NEED_NEED_STREAM_OUT = False

prompt = ChatPromptTemplate(
    messages=[
        # SystemPromptTemplate.from_template(PROMPT_SCENE_DEFINE),
        HumanPromptTemplate.from_template(_DEFAULT_TEMPLATE + PROMPT_RESPONSE),
    ]
)

# Adapter wiring the prompt, scene, and output parser together.
prompt_adapter = AppScenePromptTemplateAdapter(
    prompt=prompt,
    template_scene=ChatScene.ExtractEntity.value(),
    stream_out=PROMPT_NEED_NEED_STREAM_OUT,
    output_parser=ExtractEntityParser(is_stream_out=PROMPT_NEED_NEED_STREAM_OUT),
    need_historical_messages=False,
)
# Register as the default template for the ExtractEntity scene.
CFG.prompt_template_registry.register(prompt_adapter, is_default=True)

@ -1,29 +0,0 @@
from typing import Dict
from dbgpt.app.scene import BaseChat, ChatScene
class ExtractTriplet(BaseChat):
    """Extract knowledge triplets (subject, predicate, object) via the LLM."""

    chat_scene: str = ChatScene.ExtractTriplet.value()

    def __init__(self, chat_param: Dict):
        """Initialize from chat_param; requires ``current_user_input`` and
        ``select_param`` keys."""
        chat_param["chat_mode"] = ChatScene.ExtractTriplet
        super().__init__(
            chat_param=chat_param,
        )
        self.user_input = chat_param["current_user_input"]
        self.extract_mode = chat_param["select_param"]

    async def generate_input_values(self):
        """Provide the template variables for the extraction prompt."""
        input_values = {
            "text": self.user_input,
        }
        return input_values

    @property
    def chat_type(self) -> str:
        # Bug fix: previously returned ChatScene.ExtractTriplet.value (the
        # bound method object, missing call parens) instead of the scene
        # value string, inconsistent with `chat_scene` above.
        return ChatScene.ExtractTriplet.value()

@ -1,52 +0,0 @@
import logging
import re
from typing import List, Tuple
from dbgpt.core.interface.output_parser import BaseOutputParser
logger = logging.getLogger(__name__)
class ExtractTripleParser(BaseOutputParser):
    """Parse the LLM triplet-extraction response into (subject, predicate,
    object) tuples."""

    def __init__(self, is_stream_out: bool, **kwargs):
        super().__init__(is_stream_out=is_stream_out, **kwargs)

    def parse_prompt_response(
        self, response, max_length: int = 128
    ) -> List[Tuple[str, str, str]]:
        """Extract well-formed ``(subject, predicate, object)`` triplets.

        Args:
            response: Raw LLM text, optionally prefixed with ``Triplets:``.
            max_length: Maximum UTF-8 byte length allowed per token; longer
                tokens indicate a malformed triplet and are skipped.

        Returns:
            Lowercased triplets in order of appearance.
        """
        # Bug fix: use the module logger (defined above) instead of print.
        logger.info("clean prompt response: %s", response)
        if response.startswith("Triplets:"):
            response = response[len("Triplets:") :]
        # Grab every parenthesized group, e.g. "(Alice, is mother of, Bob)".
        pattern = r"\([^()]+\)"
        response = re.findall(pattern, response)
        logger.info("parse prompt response: %s", response)
        results = []
        for text in response:
            if not text or text[0] != "(" or text[-1] != ")":
                # skip empty lines and non-triplets
                continue
            tokens = text[1:-1].split(",")
            if len(tokens) != 3:
                continue
            if any(len(s.encode("utf-8")) > max_length for s in tokens):
                # We count byte-length instead of len() for UTF-8 chars,
                # will skip if any of the tokens are too long.
                # This is normally due to a poorly formatted triplet
                # extraction, in more serious KG building cases
                # we'll need NLP models to better extract triplets.
                continue
            subject, predicate, obj = map(str.strip, tokens)
            if not subject or not predicate or not obj:
                # skip partial triplets
                continue
            results.append((subject.lower(), predicate.lower(), obj.lower()))
        return results

    def parse_view_response(self, speak, data) -> str:
        # Pass extracted data through unchanged for the view layer.
        return data

@ -1,51 +0,0 @@
from dbgpt._private.config import Config
from dbgpt.app.scene import AppScenePromptTemplateAdapter, ChatScene
from dbgpt.app.scene.chat_knowledge.extract_triplet.out_parser import (
ExtractTripleParser,
)
from dbgpt.core import ChatPromptTemplate, HumanPromptTemplate
CFG = Config()

# No scene-level system prompt is used for triplet extraction.
PROMPT_SCENE_DEFINE = """"""

# Human prompt: few-shot examples asking for up to 10 (subject, predicate,
# object) triplets, in the parenthesized form consumed by ExtractTripleParser.
_DEFAULT_TEMPLATE = """
"Some text is provided below. Given the text, extract up to 10"
"knowledge triplets in the form of (subject, predicate, object). Avoid stopwords.\n"
"---------------------\n"
"Example:"
"Text: Alice is Bob's mother."
"Triplets:\n(Alice, is mother of, Bob)\n"
"Text: Philz is a coffee shop founded in Berkeley in 1982.\n"
"Triplets:\n"
"(Philz, is, coffee shop)\n"
"(Philz, founded in, Berkeley)\n"
"(Philz, founded in, 1982)\n"
"---------------------\n"
"Text: {text}\n"
"Triplets:\n"
ensure Respond in the following List(Tuple) format:
'(Stephen Curry, plays for, Golden State Warriors)\n(Stephen Curry, known for, shooting skills)\n(Stephen Curry, attended, Davidson College)\n(Stephen Curry, led, team to success)'
"""

# No extra response-format suffix beyond the template above.
PROMPT_RESPONSE = """"""

# Triplet extraction returns the full parsed result at once (no streaming).
# NOTE(review): "NEED_NEED" looks like a typo for "NEED"; name kept as-is
# because it is referenced below.
PROMPT_NEED_NEED_STREAM_OUT = False

prompt = ChatPromptTemplate(
    messages=[
        # SystemPromptTemplate.from_template(PROMPT_SCENE_DEFINE),
        HumanPromptTemplate.from_template(_DEFAULT_TEMPLATE + PROMPT_RESPONSE),
    ]
)

# Adapter wiring the prompt, scene, and output parser together.
prompt_adapter = AppScenePromptTemplateAdapter(
    prompt=prompt,
    template_scene=ChatScene.ExtractTriplet.value(),
    stream_out=PROMPT_NEED_NEED_STREAM_OUT,
    output_parser=ExtractTripleParser(is_stream_out=PROMPT_NEED_NEED_STREAM_OUT),
    need_historical_messages=False,
)
# Register as the default template for the ExtractTriplet scene.
CFG.prompt_template_registry.register(prompt_adapter, is_default=True)

@ -18,7 +18,7 @@ _DEFAULT_TEMPLATE_ZH = """ 基于以下给出的已知信息, 准守规范约束
规范约束:
1.如果已知信息包含的图片链接表格代码块等特殊markdown标签格式的信息确保在答案中包含原文这些图片链接表格和代码标签不要丢弃不要修改:图片格式![image.png](xxx), 链接格式:[xxx](xxx), 表格格式:|xxx|xxx|xxx|, 代码格式:```xxx```.
2.如果无法从提供的内容中获取答案, 请说: "知识库中提供的内容不足以回答此问题" 禁止胡乱编造.
3.回答的时候最好按照1.2.3.点进行总结.
3.回答的时候最好按照1.2.3.点进行总结, 并以markdwon格式显示.
已知内容:
{context}
问题:
@ -29,7 +29,7 @@ constraints:
1.Ensure to include original markdown formatting elements such as images, links, tables, or code blocks without alteration in the response if they are present in the provided information.
For example, image format should be ![image.png](xxx), link format [xxx](xxx), table format should be represented with |xxx|xxx|xxx|, and code format with xxx.
2.If the information available in the knowledge base is insufficient to answer the question, state clearly: "The content provided in the knowledge base is not enough to answer this question," and avoid making up answers.
3.When responding, it is best to summarize the points in the order of 1, 2, 3.
3.When responding, it is best to summarize the points in the order of 1, 2, 3, And displayed in markdwon format.
known information:
{context}
question:

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

@ -1,4 +1,4 @@
"use strict";(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[2057],{12057:function(e,t,i){i.r(t),i.d(t,{default:function(){return a}});var s=i(812),n=[{key:"obwhite",config:{base:"vs",inherit:!0,rules:[{foreground:"09885A",token:"comment"}],colors:{"editor.foreground":"#3B3B3B","editor.background":"#FFFFFF","editor.selectionBackground":"#BAD6FD","editor.lineHighlightBackground":"#00000012","editorCursor.foreground":"#000000","editorWhitespace.foreground":"#BFBFBF"}}},{key:"obdark",config:{base:"vs-dark",inherit:!0,rules:[{foreground:"F7F9FB",token:"identifier"},{foreground:"98D782",token:"number"}],colors:{}}}],a=class{constructor(){this.modelOptionsMap=new Map}setup(e=["mysql","obmysql","oboracle"]){e.forEach(e=>{switch(e){case"mysql":i.e(2384).then(i.bind(i,2384)).then(e=>{e.setup(this)});break;case"obmysql":i.e(8748).then(i.bind(i,48748)).then(e=>{e.setup(this)});break;case"oboracle":i.e(7121).then(i.bind(i,87121)).then(e=>{e.setup(this)})}}),n.forEach(e=>{s.j6.defineTheme(e.key,e.config)})}setModelOptions(e,t){this.modelOptionsMap.set(e,t)}}},812:function(e,t,i){i.d(t,{j6:function(){return r.editor},Mj:function(){return r.languages}}),i(29477),i(90236),i(51725),i(42549),i(24336),i(72102),i(55833),i(34281),i(38334),i(29079),i(39956),i(93740),i(85754),i(41895),i(27107),i(76917),i(22482),i(55826),i(40714),i(44125),i(61097),i(99803),i(62078),i(95817),i(22470),i(66122),i(19646),i(68077),i(84602),i(77563),i(70448),i(97830),i(97615),i(49504),i(76),i(18408),i(77061),i(97660),i(91732),i(60669),i(96816),i(73945),i(45048),i(82379),i(47721),i(98762),i(61984),i(76092),i(88088),i(15662),i(64662),i(52614),i(95180),i(79607),i(61271),i(70943),i(37181),i(86709);var s,n,a,o,r=i(9869),l=i(25552);/*!-----------------------------------------------------------------------------
"use strict";(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[2057],{12057:function(e,t,i){i.r(t),i.d(t,{default:function(){return a}});var s=i(812),n=[{key:"obwhite",config:{base:"vs",inherit:!0,rules:[{foreground:"09885A",token:"comment"}],colors:{"editor.foreground":"#3B3B3B","editor.background":"#FFFFFF","editor.selectionBackground":"#BAD6FD","editor.lineHighlightBackground":"#00000012","editorCursor.foreground":"#000000","editorWhitespace.foreground":"#BFBFBF"}}},{key:"obdark",config:{base:"vs-dark",inherit:!0,rules:[{foreground:"F7F9FB",token:"identifier"},{foreground:"98D782",token:"number"}],colors:{}}}],a=class{constructor(){this.modelOptionsMap=new Map}setup(e=["mysql","obmysql","oboracle"]){e.forEach(e=>{switch(e){case"mysql":i.e(2384).then(i.bind(i,2384)).then(e=>{e.setup(this)});break;case"obmysql":i.e(8748).then(i.bind(i,48748)).then(e=>{e.setup(this)});break;case"oboracle":i.e(7121).then(i.bind(i,87121)).then(e=>{e.setup(this)})}}),n.forEach(e=>{s.j6.defineTheme(e.key,e.config)})}setModelOptions(e,t){this.modelOptionsMap.set(e,t)}}},812:function(e,t,i){i.d(t,{j6:function(){return r.editor},Mj:function(){return r.languages}}),i(29477),i(90236),i(71387),i(42549),i(24336),i(72102),i(55833),i(34281),i(38334),i(29079),i(39956),i(93740),i(85754),i(41895),i(27107),i(76917),i(22482),i(55826),i(40714),i(44125),i(61097),i(99803),i(62078),i(95817),i(22470),i(66122),i(19646),i(68077),i(84602),i(77563),i(70448),i(97830),i(97615),i(49504),i(76),i(18408),i(77061),i(97660),i(91732),i(60669),i(96816),i(73945),i(45048),i(82379),i(47721),i(98762),i(61984),i(76092),i(88088),i(15662),i(64662),i(52614),i(95180),i(79607),i(61271),i(70943),i(37181),i(86709);var s,n,a,o,r=i(9869),l=i(25552);/*!-----------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Version: 0.34.1(547870b6881302c5b4ff32173c16d06009e3588f)
* Released under the MIT license
@ -48,7 +48,7 @@
* Version: 0.34.1(547870b6881302c5b4ff32173c16d06009e3588f)
* Released under the MIT license
* https://github.com/microsoft/monaco-editor/blob/main/LICENSE.txt
*-----------------------------------------------------------------------------*/(0,l.H)({id:"csharp",extensions:[".cs",".csx",".cake"],aliases:["C#","csharp"],loader:()=>i.e(8719).then(i.bind(i,50669))}),/*!-----------------------------------------------------------------------------
*-----------------------------------------------------------------------------*/(0,l.H)({id:"csharp",extensions:[".cs",".csx",".cake"],aliases:["C#","csharp"],loader:()=>i.e(8719).then(i.bind(i,18719))}),/*!-----------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Version: 0.34.1(547870b6881302c5b4ff32173c16d06009e3588f)
* Released under the MIT license

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

@ -1,4 +1,4 @@
"use strict";(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[5813],{85813:function(e,t,a){a.d(t,{Z:function(){return U}});var i=a(94184),n=a.n(i),r=a(98423),l=a(67294),o=a(53124),s=a(98675),d=e=>{let{prefixCls:t,className:a,style:i,size:r,shape:o}=e,s=n()({[`${t}-lg`]:"large"===r,[`${t}-sm`]:"small"===r}),d=n()({[`${t}-circle`]:"circle"===o,[`${t}-square`]:"square"===o,[`${t}-round`]:"round"===o}),c=l.useMemo(()=>"number"==typeof r?{width:r,height:r,lineHeight:`${r}px`}:{},[r]);return l.createElement("span",{className:n()(t,s,d,a),style:Object.assign(Object.assign({},c),i)})},c=a(23183),g=a(67968),$=a(45503);let b=new c.E4("ant-skeleton-loading",{"0%":{backgroundPosition:"100% 50%"},"100%":{backgroundPosition:"0 50%"}}),p=e=>({height:e,lineHeight:`${e}px`}),m=e=>Object.assign({width:e},p(e)),u=e=>({background:e.skeletonLoadingBackground,backgroundSize:"400% 100%",animationName:b,animationDuration:e.skeletonLoadingMotionDuration,animationTimingFunction:"ease",animationIterationCount:"infinite"}),h=e=>Object.assign({width:5*e,minWidth:5*e},p(e)),f=e=>{let{skeletonAvatarCls:t,gradientFromColor:a,controlHeight:i,controlHeightLG:n,controlHeightSM:r}=e;return{[`${t}`]:Object.assign({display:"inline-block",verticalAlign:"top",background:a},m(i)),[`${t}${t}-circle`]:{borderRadius:"50%"},[`${t}${t}-lg`]:Object.assign({},m(n)),[`${t}${t}-sm`]:Object.assign({},m(r))}},v=e=>{let{controlHeight:t,borderRadiusSM:a,skeletonInputCls:i,controlHeightLG:n,controlHeightSM:r,gradientFromColor:l}=e;return{[`${i}`]:Object.assign({display:"inline-block",verticalAlign:"top",background:l,borderRadius:a},h(t)),[`${i}-lg`]:Object.assign({},h(n)),[`${i}-sm`]:Object.assign({},h(r))}},x=e=>Object.assign({width:e},p(e)),y=e=>{let{skeletonImageCls:t,imageSizeBase:a,gradientFromColor:i,borderRadiusSM:n}=e;return{[`${t}`]:Object.assign(Object.assign({display:"flex",alignItems:"center",justifyContent:"center",verticalAlign:"top",background:i,borderRadius:n},x(2*a)),{[`${t}-path`]:{fill:"#bfbfb
f"},[`${t}-svg`]:Object.assign(Object.assign({},x(a)),{maxWidth:4*a,maxHeight:4*a}),[`${t}-svg${t}-svg-circle`]:{borderRadius:"50%"}}),[`${t}${t}-circle`]:{borderRadius:"50%"}}},O=(e,t,a)=>{let{skeletonButtonCls:i}=e;return{[`${a}${i}-circle`]:{width:t,minWidth:t,borderRadius:"50%"},[`${a}${i}-round`]:{borderRadius:t}}},j=e=>Object.assign({width:2*e,minWidth:2*e},p(e)),S=e=>{let{borderRadiusSM:t,skeletonButtonCls:a,controlHeight:i,controlHeightLG:n,controlHeightSM:r,gradientFromColor:l}=e;return Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({[`${a}`]:Object.assign({display:"inline-block",verticalAlign:"top",background:l,borderRadius:t,width:2*i,minWidth:2*i},j(i))},O(e,i,a)),{[`${a}-lg`]:Object.assign({},j(n))}),O(e,n,`${a}-lg`)),{[`${a}-sm`]:Object.assign({},j(r))}),O(e,r,`${a}-sm`))},E=e=>{let{componentCls:t,skeletonAvatarCls:a,skeletonTitleCls:i,skeletonParagraphCls:n,skeletonButtonCls:r,skeletonInputCls:l,skeletonImageCls:o,controlHeight:s,controlHeightLG:d,controlHeightSM:c,gradientFromColor:g,padding:$,marginSM:b,borderRadius:p,titleHeight:h,blockRadius:x,paragraphLiHeight:O,controlHeightXS:j,paragraphMarginTop:E}=e;return{[`${t}`]:{display:"table",width:"100%",[`${t}-header`]:{display:"table-cell",paddingInlineEnd:$,verticalAlign:"top",[`${a}`]:Object.assign({display:"inline-block",verticalAlign:"top",background:g},m(s)),[`${a}-circle`]:{borderRadius:"50%"},[`${a}-lg`]:Object.assign({},m(d)),[`${a}-sm`]:Object.assign({},m(c))},[`${t}-content`]:{display:"table-cell",width:"100%",verticalAlign:"top",[`${i}`]:{width:"100%",height:h,background:g,borderRadius:x,[`+ ${n}`]:{marginBlockStart:c}},[`${n}`]:{padding:0,"> li":{width:"100%",height:O,listStyle:"none",background:g,borderRadius:x,"+ li":{marginBlockStart:j}}},[`${n}> li:last-child:not(:first-child):not(:nth-child(2))`]:{width:"61%"}},[`&-round ${t}-content`]:{[`${i}, ${n} > li`]:{borderRadius:p}}},[`${t}-with-avatar ${t}-content`]:{[`${i}`]:{marginBlockStart:b,[`+ 
${n}`]:{marginBlockStart:E}}},[`${t}${t}-element`]:Object.assign(Object.assign(Object.assign(Object.assign({display:"inline-block",width:"auto"},S(e)),f(e)),v(e)),y(e)),[`${t}${t}-block`]:{width:"100%",[`${r}`]:{width:"100%"},[`${l}`]:{width:"100%"}},[`${t}${t}-active`]:{[`
"use strict";(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[5813],{85813:function(e,t,a){a.d(t,{Z:function(){return U}});var i=a(93967),n=a.n(i),r=a(98423),l=a(67294),o=a(53124),s=a(98675),d=e=>{let{prefixCls:t,className:a,style:i,size:r,shape:o}=e,s=n()({[`${t}-lg`]:"large"===r,[`${t}-sm`]:"small"===r}),d=n()({[`${t}-circle`]:"circle"===o,[`${t}-square`]:"square"===o,[`${t}-round`]:"round"===o}),c=l.useMemo(()=>"number"==typeof r?{width:r,height:r,lineHeight:`${r}px`}:{},[r]);return l.createElement("span",{className:n()(t,s,d,a),style:Object.assign(Object.assign({},c),i)})},c=a(77794),g=a(67968),$=a(45503);let b=new c.E4("ant-skeleton-loading",{"0%":{backgroundPosition:"100% 50%"},"100%":{backgroundPosition:"0 50%"}}),p=e=>({height:e,lineHeight:`${e}px`}),m=e=>Object.assign({width:e},p(e)),u=e=>({background:e.skeletonLoadingBackground,backgroundSize:"400% 100%",animationName:b,animationDuration:e.skeletonLoadingMotionDuration,animationTimingFunction:"ease",animationIterationCount:"infinite"}),h=e=>Object.assign({width:5*e,minWidth:5*e},p(e)),f=e=>{let{skeletonAvatarCls:t,gradientFromColor:a,controlHeight:i,controlHeightLG:n,controlHeightSM:r}=e;return{[`${t}`]:Object.assign({display:"inline-block",verticalAlign:"top",background:a},m(i)),[`${t}${t}-circle`]:{borderRadius:"50%"},[`${t}${t}-lg`]:Object.assign({},m(n)),[`${t}${t}-sm`]:Object.assign({},m(r))}},v=e=>{let{controlHeight:t,borderRadiusSM:a,skeletonInputCls:i,controlHeightLG:n,controlHeightSM:r,gradientFromColor:l}=e;return{[`${i}`]:Object.assign({display:"inline-block",verticalAlign:"top",background:l,borderRadius:a},h(t)),[`${i}-lg`]:Object.assign({},h(n)),[`${i}-sm`]:Object.assign({},h(r))}},x=e=>Object.assign({width:e},p(e)),y=e=>{let{skeletonImageCls:t,imageSizeBase:a,gradientFromColor:i,borderRadiusSM:n}=e;return{[`${t}`]:Object.assign(Object.assign({display:"flex",alignItems:"center",justifyContent:"center",verticalAlign:"top",background:i,borderRadius:n},x(2*a)),{[`${t}-path`]:{fill:"#bfbfb
f"},[`${t}-svg`]:Object.assign(Object.assign({},x(a)),{maxWidth:4*a,maxHeight:4*a}),[`${t}-svg${t}-svg-circle`]:{borderRadius:"50%"}}),[`${t}${t}-circle`]:{borderRadius:"50%"}}},O=(e,t,a)=>{let{skeletonButtonCls:i}=e;return{[`${a}${i}-circle`]:{width:t,minWidth:t,borderRadius:"50%"},[`${a}${i}-round`]:{borderRadius:t}}},j=e=>Object.assign({width:2*e,minWidth:2*e},p(e)),S=e=>{let{borderRadiusSM:t,skeletonButtonCls:a,controlHeight:i,controlHeightLG:n,controlHeightSM:r,gradientFromColor:l}=e;return Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({[`${a}`]:Object.assign({display:"inline-block",verticalAlign:"top",background:l,borderRadius:t,width:2*i,minWidth:2*i},j(i))},O(e,i,a)),{[`${a}-lg`]:Object.assign({},j(n))}),O(e,n,`${a}-lg`)),{[`${a}-sm`]:Object.assign({},j(r))}),O(e,r,`${a}-sm`))},E=e=>{let{componentCls:t,skeletonAvatarCls:a,skeletonTitleCls:i,skeletonParagraphCls:n,skeletonButtonCls:r,skeletonInputCls:l,skeletonImageCls:o,controlHeight:s,controlHeightLG:d,controlHeightSM:c,gradientFromColor:g,padding:$,marginSM:b,borderRadius:p,titleHeight:h,blockRadius:x,paragraphLiHeight:O,controlHeightXS:j,paragraphMarginTop:E}=e;return{[`${t}`]:{display:"table",width:"100%",[`${t}-header`]:{display:"table-cell",paddingInlineEnd:$,verticalAlign:"top",[`${a}`]:Object.assign({display:"inline-block",verticalAlign:"top",background:g},m(s)),[`${a}-circle`]:{borderRadius:"50%"},[`${a}-lg`]:Object.assign({},m(d)),[`${a}-sm`]:Object.assign({},m(c))},[`${t}-content`]:{display:"table-cell",width:"100%",verticalAlign:"top",[`${i}`]:{width:"100%",height:h,background:g,borderRadius:x,[`+ ${n}`]:{marginBlockStart:c}},[`${n}`]:{padding:0,"> li":{width:"100%",height:O,listStyle:"none",background:g,borderRadius:x,"+ li":{marginBlockStart:j}}},[`${n}> li:last-child:not(:first-child):not(:nth-child(2))`]:{width:"61%"}},[`&-round ${t}-content`]:{[`${i}, ${n} > li`]:{borderRadius:p}}},[`${t}-with-avatar ${t}-content`]:{[`${i}`]:{marginBlockStart:b,[`+ 
${n}`]:{marginBlockStart:E}}},[`${t}${t}-element`]:Object.assign(Object.assign(Object.assign(Object.assign({display:"inline-block",width:"auto"},S(e)),f(e)),v(e)),y(e)),[`${t}${t}-block`]:{width:"100%",[`${r}`]:{width:"100%"},[`${l}`]:{width:"100%"}},[`${t}${t}-active`]:{[`
${i},
${n} > li,
${a},

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

@ -1,4 +1,4 @@
"use strict";(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[8719],{50669:function(e,t,n){n.r(t),n.d(t,{conf:function(){return s},language:function(){return o}});/*!-----------------------------------------------------------------------------
"use strict";(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[8719],{18719:function(e,t,n){n.r(t),n.d(t,{conf:function(){return s},language:function(){return o}});/*!-----------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Version: 0.34.1(547870b6881302c5b4ff32173c16d06009e3588f)
* Released under the MIT license

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

@ -1 +0,0 @@
self.__BUILD_MANIFEST=function(s,c,a,e,t,n,f,d,k,h,i,u,b,j,p,g,o,l,r,_){return{__rewrites:{beforeFiles:[],afterFiles:[],fallback:[]},"/":[p,s,c,e,a,f,h,d,g,"static/chunks/9305-f44429d5185a9fc7.js","static/chunks/1353-705aa47cc2b94999.js","static/chunks/pages/index-0b2d61c1c6358f20.js"],"/_error":["static/chunks/pages/_error-8095ba9e1bf12f30.js"],"/agent":[s,c,a,t,h,n,"static/chunks/pages/agent-2be7990da37f5165.js"],"/app":[i,s,c,e,a,t,n,u,b,o,j,"static/chunks/pages/app-8154f6fcced2f743.js"],"/chat":["static/chunks/pages/chat-deaf5379f2dc5cac.js"],"/database":[s,c,e,a,t,n,d,k,"static/chunks/3718-e111d727d432bdd2.js","static/chunks/pages/database-7384ab94b08f23ff.js"],"/flow":[i,s,c,a,u,b,o,j,"static/chunks/pages/flow-33fe9f396642fb4c.js"],"/flow/canvas":[p,i,s,c,e,a,f,d,u,k,b,l,g,"static/chunks/1425-6e94ae18b1ac5a70.js",j,"static/chunks/pages/flow/canvas-644b6ee718585173.js"],"/knowledge":[r,s,c,e,a,t,h,n,d,k,_,l,"static/chunks/5237-1d36a3742424b75e.js","static/chunks/pages/knowledge-223d50e9531bd961.js"],"/knowledge/chunk":[s,e,t,f,n,"static/chunks/pages/knowledge/chunk-625a32aed5f380e2.js"],"/knowledge/graph":["static/chunks/90912e1b-ed32608ee46ab40f.js","static/chunks/193-5e83ce3fd4f165ef.js","static/chunks/pages/knowledge/graph-9fb1ec6bf06d5108.js"],"/models":[r,s,c,e,a,k,"static/chunks/3444-30181eacc7980e66.js","static/chunks/pages/models-446238c56e41aa1b.js"],"/prompt":[s,c,e,a,f,_,"static/chunks/7184-3ca3f58327a6986a.js","static/chunks/7869-1a99e25b182b3eaa.js","static/chunks/pages/prompt-c44ac718b4d637c9.js"],sortedPages:["/","/_app","/_error","/agent","/app","/chat","/database","/flow","/flow/canvas","/knowledge","/knowledge/chunk","/knowledge/graph","/models","/prompt"]}}("static/chunks/2185-30f9d0578fa0d631.js","static/chunks/5503-c65f6d730754acc7.js","static/chunks/9479-21f588e1fd4e6b6d.js","static/chunks/1009-4b2af86bde623424.js","static/chunks/785-c3544abc036fc97d.js","static/chunks/5813-c6244a8eba7ef4ae.js","static/chunks/1647-8683da4db89d68c1.js","sta
tic/chunks/411-b5d3e7f64bee2335.js","static/chunks/8928-0e78def492052d13.js","static/chunks/4553-61740188e6a650a8.js","static/chunks/971df74e-7436ff4085ebb785.js","static/chunks/7434-29506257e67e8077.js","static/chunks/9924-5bce555f07385e1f.js","static/css/b4846eed11c4725f.css","static/chunks/29107295-75edf0bf34e24b1e.js","static/chunks/2487-cda9d2a2fd712a15.js","static/chunks/6165-93d23bc520382b2c.js","static/chunks/2282-96412afca1591c9a.js","static/chunks/75fc9c18-1d6133135d3d283c.js","static/chunks/5733-7ef320ab0f876a5e.js"),self.__BUILD_MANIFEST_CB&&self.__BUILD_MANIFEST_CB();

@ -0,0 +1 @@
self.__BUILD_MANIFEST=function(s,c,a,e,t,f,n,d,k,b,h,i,u,j,p,g,o,l,r,_){return{__rewrites:{beforeFiles:[],afterFiles:[],fallback:[]},"/":[p,s,c,e,a,n,b,d,g,"static/chunks/9305-eb817abebcfffa20.js","static/chunks/1353-1dacbd59a5cf5fb8.js","static/chunks/pages/index-217e7be2e89f3434.js"],"/_error":["static/chunks/pages/_error-8095ba9e1bf12f30.js"],"/agent":[s,c,a,t,b,f,"static/chunks/pages/agent-baf4571810cdd046.js"],"/app":[h,s,c,e,a,t,f,i,u,o,j,"static/chunks/pages/app-66a520069742bfcc.js"],"/chat":["static/chunks/pages/chat-dc50c4cc53e5b3a0.js"],"/database":[s,c,e,a,t,f,d,k,"static/chunks/3718-87572fc24f1c1cdf.js","static/chunks/pages/database-0428b7022de673a0.js"],"/flow":[h,s,c,a,i,u,o,j,"static/chunks/pages/flow-18c806ed1099f71f.js"],"/flow/canvas":[p,h,s,c,e,a,n,d,i,k,u,l,g,"static/chunks/1425-97f488f9d27648f7.js",j,"static/chunks/pages/flow/canvas-e8ea0a0f7aee0e13.js"],"/knowledge":[r,s,c,e,a,t,b,f,d,k,_,l,"static/chunks/5237-f8ce62e2a793a23a.js","static/chunks/pages/knowledge-71bdd8ab0d1d3756.js"],"/knowledge/chunk":[s,e,t,n,f,"static/chunks/pages/knowledge/chunk-7326b8534d2f9172.js"],"/knowledge/graph":["static/chunks/90912e1b-91352761084b91ff.js","static/chunks/193-b83823cd8ccb6a41.js","static/chunks/pages/knowledge/graph-f0b2c9d145d2c446.js"],"/models":[r,s,c,e,a,k,"static/chunks/3444-1911da618e1e8971.js","static/chunks/pages/models-091bfc790579fe32.js"],"/prompt":[s,c,e,a,n,_,"static/chunks/7119-64fb8f0364433c24.js","static/chunks/2453-26e8f6483c6e4575.js","static/chunks/pages/prompt-f0ec387782dbed90.js"],sortedPages:["/","/_app","/_error","/agent","/app","/chat","/database","/flow","/flow/canvas","/knowledge","/knowledge/chunk","/knowledge/graph","/models","/prompt"]}}("static/chunks/2185-6a46fbdf54a5364a.js","static/chunks/5503-f73cb46e78278f42.js","static/chunks/9479-8a6b32582d10ba1f.js","static/chunks/1009-5d81dfaf6e0efeb1.js","static/chunks/785-7baed2336ce7962c.js","static/chunks/5813-ba0135c147bac9a0.js","static/chunks/1647-5c6bd87432337e74.js","sta
tic/chunks/411-3e1adedff6595f9e.js","static/chunks/8928-0dd0f412ae0f4962.js","static/chunks/4553-2eeeec162e6b9d24.js","static/chunks/971df74e-084169c6c09eb1fe.js","static/chunks/7434-29506257e67e8077.js","static/chunks/9924-42c72dae9efe5ccd.js","static/css/a275cc2b185e04f8.css","static/chunks/29107295-75edf0bf34e24b1e.js","static/chunks/2487-4522eeb3601ff54e.js","static/chunks/6165-48eaed9a80fbbd1b.js","static/chunks/2282-e90d1926eaaf3b3b.js","static/chunks/75fc9c18-f5c95b15762b1b2f.js","static/chunks/5733-ec2a588444393e17.js"),self.__BUILD_MANIFEST_CB&&self.__BUILD_MANIFEST_CB();

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Some files were not shown because too many files have changed in this diff Show More

Loading…
Cancel
Save