diff --git a/docker/.env b/docker/.env
index 5993705541912e5d7cfb514dccdddf736d8780b0..fefb602e242ed542ae68c2792aa68e0207856867 100644
--- a/docker/.env
+++ b/docker/.env
@@ -11,7 +11,11 @@ ES_PORT=9200
 KIBANA_PORT=6601
 
 # Increase or decrease based on the available host memory (in bytes)
-MEM_LIMIT=1073741824
+MEM_LIMIT=4073741824
 
 POSTGRES_USER=root
 POSTGRES_PASSWORD=infiniflow_docgpt
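+
+# MinIO credentials referenced by docker-compose.yml; values match python/conf/sys.cnf
+MINIO_USER=infiniflow
+MINIO_PASSWORD=infiniflow_docgpt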
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
index 249e5ab5547292e153f67fe03e45d33351e6790e..04aabdcc121620b01d79090eb2c1476fc29e2272 100644
--- a/docker/docker-compose.yml
+++ b/docker/docker-compose.yml
@@ -54,6 +54,22 @@ services:
       - docgpt
     restart: always
 
+  minio:
+    image: quay.io/minio/minio:RELEASE.2023-12-20T01-00-02Z
+    container_name: docgpt-minio
+    command: server --console-address ":9001" /data
+    ports:
+      - 9000:9000
+      - 9001:9001
+    environment:
+      - MINIO_ROOT_USER=${MINIO_USER}
+      - MINIO_ROOT_PASSWORD=${MINIO_PASSWORD}
+    volumes:
+      - minio_data:/data
+    networks:
+      - docgpt
+    restart: always
+
 
 volumes:
   esdata01:
@@ -62,6 +78,8 @@ volumes:
     driver: local
   pg_data:
     driver: local
+  minio_data:
+    driver: local
 
 networks:
   docgpt:
diff --git a/migration/src/m20220101_000001_create_table.rs b/migration/src/m20220101_000001_create_table.rs
index f310a29081b9123b4a0fd07d99cd488ee2430dfd..505847d47c6501e7460c135dea1cbf008aa75610 100644
--- a/migration/src/m20220101_000001_create_table.rs
+++ b/migration/src/m20220101_000001_create_table.rs
@@ -20,13 +20,14 @@ impl MigrationTrait for Migration {
                     )
                     .col(ColumnDef::new(UserInfo::Email).string().not_null())
                     .col(ColumnDef::new(UserInfo::Nickname).string().not_null())
-                    .col(ColumnDef::new(UserInfo::AvatarUrl).string())
-                    .col(ColumnDef::new(UserInfo::ColorSchema).string().default("dark"))
+                    .col(ColumnDef::new(UserInfo::AvatarBase64).string())
+                    .col(ColumnDef::new(UserInfo::ColorScheme).string().default("dark"))
                     .col(ColumnDef::new(UserInfo::ListStyle).string().default("list"))
                     .col(ColumnDef::new(UserInfo::Language).string().default("chinese"))
                     .col(ColumnDef::new(UserInfo::Password).string().not_null())
-                    .col(ColumnDef::new(UserInfo::CreatedAt).date().not_null())
-                    .col(ColumnDef::new(UserInfo::UpdatedAt).date().not_null())
+                    .col(ColumnDef::new(UserInfo::LastLoginAt).timestamp_with_time_zone())
+                    .col(ColumnDef::new(UserInfo::CreatedAt).timestamp_with_time_zone().not_null())
+                    .col(ColumnDef::new(UserInfo::UpdatedAt).timestamp_with_time_zone().not_null())
                     .col(ColumnDef::new(UserInfo::IsDeleted).boolean().default(false))
                     .to_owned(),
             )
@@ -49,9 +50,9 @@ impl MigrationTrait for Migration {
                     .col(ColumnDef::new(TagInfo::Regx).string())
                     .col(ColumnDef::new(TagInfo::Color).tiny_unsigned().default(1))
                     .col(ColumnDef::new(TagInfo::Icon).tiny_unsigned().default(1))
-                    .col(ColumnDef::new(TagInfo::Dir).string())
-                    .col(ColumnDef::new(TagInfo::CreatedAt).date().not_null())
-                    .col(ColumnDef::new(TagInfo::UpdatedAt).date().not_null())
+                    .col(ColumnDef::new(TagInfo::FolderId).big_integer())
+                    .col(ColumnDef::new(TagInfo::CreatedAt).timestamp_with_time_zone().not_null())
+                    .col(ColumnDef::new(TagInfo::UpdatedAt).timestamp_with_time_zone().not_null())
                     .col(ColumnDef::new(TagInfo::IsDeleted).boolean().default(false))
                     .to_owned(),
             )
@@ -89,6 +90,10 @@ impl MigrationTrait for Migration {
                     )
                     .col(ColumnDef::new(Kb2Doc::KbId).big_integer())
                     .col(ColumnDef::new(Kb2Doc::Did).big_integer())
+                    .col(ColumnDef::new(Kb2Doc::KbProgress).float().default(0))
+                    .col(ColumnDef::new(Kb2Doc::KbProgressMsg).string().default(""))
+                    .col(ColumnDef::new(Kb2Doc::UpdatedAt).timestamp_with_time_zone().not_null())
+                    .col(ColumnDef::new(Kb2Doc::IsDeleted).boolean().default(false))
                     .to_owned(),
             )
             .await?;
@@ -141,8 +146,8 @@ impl MigrationTrait for Migration {
                     .col(ColumnDef::new(KbInfo::Uid).big_integer().not_null())
                     .col(ColumnDef::new(KbInfo::KbName).string().not_null())
                     .col(ColumnDef::new(KbInfo::Icon).tiny_unsigned().default(1))
-                    .col(ColumnDef::new(KbInfo::CreatedAt).date().not_null())
-                    .col(ColumnDef::new(KbInfo::UpdatedAt).date().not_null())
+                    .col(ColumnDef::new(KbInfo::CreatedAt).timestamp_with_time_zone().not_null())
+                    .col(ColumnDef::new(KbInfo::UpdatedAt).timestamp_with_time_zone().not_null())
                     .col(ColumnDef::new(KbInfo::IsDeleted).boolean().default(false))
                     .to_owned(),
             )
@@ -162,10 +167,8 @@ impl MigrationTrait for Migration {
                     .col(ColumnDef::new(DocInfo::Location).string().not_null())
                     .col(ColumnDef::new(DocInfo::Size).big_integer().not_null())
                     .col(ColumnDef::new(DocInfo::Type).string().not_null()).comment("doc|folder")
-                    .col(ColumnDef::new(DocInfo::KbProgress).float().default(0))
-                    .col(ColumnDef::new(DocInfo::KbProgressMsg).string().default(""))
-                    .col(ColumnDef::new(DocInfo::CreatedAt).date().not_null())
-                    .col(ColumnDef::new(DocInfo::UpdatedAt).date().not_null())
+                    .col(ColumnDef::new(DocInfo::CreatedAt).timestamp_with_time_zone().not_null())
+                    .col(ColumnDef::new(DocInfo::UpdatedAt).timestamp_with_time_zone().not_null())
                     .col(ColumnDef::new(DocInfo::IsDeleted).boolean().default(false))
                     .to_owned(),
             )
@@ -182,10 +185,11 @@ impl MigrationTrait for Migration {
                         .auto_increment()
                         .primary_key())
                     .col(ColumnDef::new(DialogInfo::Uid).big_integer().not_null())
+                    .col(ColumnDef::new(DialogInfo::KbId).big_integer().not_null())
                     .col(ColumnDef::new(DialogInfo::DialogName).string().not_null())
                     .col(ColumnDef::new(DialogInfo::History).string().comment("json"))
-                    .col(ColumnDef::new(DialogInfo::CreatedAt).date().not_null())
-                    .col(ColumnDef::new(DialogInfo::UpdatedAt).date().not_null())
+                    .col(ColumnDef::new(DialogInfo::CreatedAt).timestamp_with_time_zone().not_null())
+                    .col(ColumnDef::new(DialogInfo::UpdatedAt).timestamp_with_time_zone().not_null())
                     .col(ColumnDef::new(DialogInfo::IsDeleted).boolean().default(false))
                     .to_owned(),
             )
@@ -241,11 +245,12 @@ enum UserInfo {
     Uid,
     Email,
     Nickname,
-    AvatarUrl,
-    ColorSchema,
+    AvatarBase64,
+    ColorScheme,
     ListStyle,
     Language,
     Password,
+    LastLoginAt,
     CreatedAt,
     UpdatedAt,
     IsDeleted,
@@ -260,7 +265,7 @@ enum TagInfo {
     Regx,
     Color,
     Icon,
-    Dir,
+    FolderId,
     CreatedAt,
     UpdatedAt,
     IsDeleted,
@@ -280,6 +285,10 @@ enum Kb2Doc {
     Id,
     KbId,
     Did,
+    KbProgress,
+    KbProgressMsg,
+    UpdatedAt,
+    IsDeleted,
 }
 
 #[derive(DeriveIden)]
@@ -319,8 +328,6 @@ enum DocInfo {
     Location,
     Size,
     Type,
-    KbProgress,
-    KbProgressMsg,
     CreatedAt,
     UpdatedAt,
     IsDeleted,
@@ -329,8 +336,9 @@ enum DocInfo {
 #[derive(DeriveIden)]
 enum DialogInfo {
     Table,
-    DialogId,
     Uid,
+    KbId,
+    DialogId,
     DialogName,
     History,
     CreatedAt,
diff --git a/python/conf/sys.cnf b/python/conf/sys.cnf
index b8d3268dd05cfb17126cbe18e11007f56d8182b6..e217ad565101b574135a238b6538b059c197ef7f 100755
--- a/python/conf/sys.cnf
+++ b/python/conf/sys.cnf
@@ -1,7 +1,10 @@
 [infiniflow]
-es=127.0.0.1:9200
+es=http://127.0.0.1:9200
 pgdb_usr=root
 pgdb_pwd=infiniflow_docgpt
 pgdb_host=127.0.0.1
 pgdb_port=5455
+minio_host=127.0.0.1:9000
+minio_usr=infiniflow
+minio_pwd=infiniflow_docgpt
 
diff --git a/python/nlp/huchunk.py b/python/nlp/huchunk.py
index 6164375d9dcbb34fe56e10d916d7fb0feffbaaf6..4e5ca8e87a60c2405cc0b2779d7879aaff021bf3 100644
--- a/python/nlp/huchunk.py
+++ b/python/nlp/huchunk.py
@@ -2,6 +2,7 @@ import re
 import os
 import copy
 import base64
+import magic
 from dataclasses import dataclass
 from typing import List
 import numpy as np
@@ -373,6 +374,7 @@ class PptChunker(HuChunker):
         from pptx import Presentation
         ppt = Presentation(fnm)
         flds = self.Fields()
+        flds.text_chunks = []
         for slide in ppt.slides:
             for shape in slide.shapes:
                 if hasattr(shape, "text"):
@@ -391,11 +393,19 @@
     def __init__(self):
         super().__init__()
 
+    @staticmethod
+    def is_binary_file(file_path):
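+        # libmagic MIME sniffing: anything without a text/* type is binary.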
+        mime = magic.Magic(mime=True)
+        file_type = mime.from_file(file_path)
+        return 'text' not in file_type
+
     def __call__(self, fnm):
         flds = self.Fields()
+        if self.is_binary_file(fnm): return flds
         with open(fnm, "r") as f:
             txt = f.read()
-            flds.text_chunks = self.naive_text_chunk(txt)
+            flds.text_chunks = [(c, None) for c in self.naive_text_chunk(txt)]
         flds.table_chunks = []
         return flds
 
diff --git a/python/svr/parse_user_docs.py b/python/svr/parse_user_docs.py
index 9ff14c40ed508f1b228d89085df78dd49bc9f943..3b9bec7b2c910aa96939ffd19909ecda152d6324 100644
--- a/python/svr/parse_user_docs.py
+++ b/python/svr/parse_user_docs.py
@@ -1,10 +1,15 @@
-import json, re, sys, os, hashlib, copy, glob, util, time, random
-from util.es_conn import HuEs, Postgres
+import json, os, sys, hashlib, copy, time, random, re, logging, torch
+from os.path import dirname, realpath
+sys.path.append(dirname(realpath(__file__)) + "/../")
+from util.es_conn import HuEs
+from util.db_conn import Postgres
+from util.minio_conn import HuMinio
 from util import rmSpace, findMaxDt
 from FlagEmbedding import FlagModel
 from nlp import huchunk, huqie
 import base64, hashlib
 from io import BytesIO
+import pandas as pd
 from elasticsearch_dsl import Q
 from parser import (
     PdfParser,
@@ -22,73 +27,119 @@ from nlp.huchunk import (
 ES = HuEs("infiniflow")
 BATCH_SIZE = 64
 PG = Postgres("infiniflow", "docgpt")
+MINIO = HuMinio("infiniflow")
 
 PDF = PdfChunker(PdfParser())
 DOC = DocxChunker(DocxParser())
 EXC = ExcelChunker(ExcelParser())
 PPT = PptChunker()
 
+UPLOAD_LOCATION = os.environ.get("UPLOAD_LOCATION", "./")
+logging.warning(f"The files are stored in {UPLOAD_LOCATION}, please check it!")
+
 
 def chuck_doc(name):
-    name = os.path.split(name)[-1].lower().split(".")[-1]
-    if name.find("pdf") >= 0: return PDF(name)
-    if name.find("doc") >= 0: return DOC(name)
-    if name.find("xlsx") >= 0: return EXC(name)
-    if name.find("ppt") >= 0: return PDF(name)
-    if name.find("pdf") >= 0: return PPT(name)
+    suff = os.path.split(name)[-1].lower().split(".")[-1]
+    if suff.find("pdf") >= 0: return PDF(name)
+    if suff.find("doc") >= 0: return DOC(name)
+    if re.match(r"(xlsx|xlsm|xltx|xltm)", suff): return EXC(name)
+    if suff.find("ppt") >= 0: return PPT(name)
     
-    if re.match(r"(txt|csv)", name): return TextChunker(name)
+    return TextChunker()(name)
 
 
 def collect(comm, mod, tm):
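+    # Pull up to 1000 pending (kb_progress = 0) kb2_doc tasks for this worker,
+    # sharded by MOD(did, comm), then join in the document metadata.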
     sql = f"""
     select 
+    id as kb2doc_id,
+    kb_id,
     did,
-    uid,
-    doc_name,
-    location,
-    updated_at
-    from docinfo
-    where 
-    updated_at >= '{tm}' 
+    updated_at,
+    is_deleted
+    from kb2_doc
+    where
+    updated_at >= '{tm}'
     and kb_progress = 0
-    and type = 'doc'
-    and MOD(uid, {comm}) = {mod}
+    and MOD(did, {comm}) = {mod}
     order by updated_at asc
     limit 1000
     """
-    df = PG.select(sql)
-    df = df.fillna("")
-    mtm = str(df["updated_at"].max())[:19]
-    print("TOTAL:", len(df), "To: ", mtm)
-    return df, mtm
+    kb2doc = PG.select(sql)
+    if len(kb2doc) == 0: return pd.DataFrame()
+
+    sql = """
+    select 
+    did,
+    uid,
+    doc_name,
+    location,
+    size
+    from doc_info
+    where 
+    did in (%s)
+    """%",".join([str(i) for i in kb2doc["did"].unique()])
+    docs = PG.select(sql)
+    docs = docs.fillna("")
+    docs = docs.join(kb2doc.set_index("did"), on="did", how="left")
+
+    mtm = str(docs["updated_at"].max())[:19]
+    print("TOTAL:", len(docs), "To: ", mtm)
+    return docs
 
 
-def set_progress(did, prog, msg):
+def set_progress(kb2doc_id, prog, msg="Processing..."):
     sql = f"""
-    update docinfo set kb_progress={prog}, kb_progress_msg='{msg}' where did={did}
+    update kb2_doc set kb_progress={prog}, kb_progress_msg='{msg}' 
+    where
+    id={kb2doc_id}
     """
     PG.update(sql)
 
 
 def build(row):
     if row["size"] > 256000000:
-        set_progress(row["did"], -1, "File size exceeds( <= 256Mb )")
+        set_progress(row["kb2doc_id"], -1, "File size exceeds( <= 256Mb )")
         return  []
-    doc = {
-        "doc_id": row["did"],
-        "title_tks": huqie.qie(os.path.split(row["location"])[-1]),
-        "updated_at": row["updated_at"]
-    }
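+    # If the doc was already indexed for another KB, just add this kb_id to
+    # its ES records instead of re-parsing and re-embedding the file.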
+    res = ES.search(Q("term", doc_id=row["did"]))
+    if ES.getTotal(res) > 0:
+        ES.updateScriptByQuery(Q("term", doc_id=row["did"]), 
+                               scripts="""
+                               if(!ctx._source.kb_id.contains('%s'))
+                                 ctx._source.kb_id.add('%s');
+                               """%(str(row["kb_id"]), str(row["kb_id"])),
+                               idxnm = index_name(row["uid"])
+                              )
+        set_progress(row["kb2doc_id"], 1, "Done")
+        return []
+
     random.seed(time.time())
-    set_progress(row["did"], random.randint(0, 20)/100., "Finished preparing! Start to slice file!")
-    obj = chuck_doc(row["location"])
-    if not obj: 
-        set_progress(row["did"], -1, "Unsuported file type.")
+    set_progress(row["kb2doc_id"], random.randint(0, 20)/100., "Finished preparing! Start to slice file!")
+    try:
+        obj = chuck_doc(os.path.join(UPLOAD_LOCATION, row["location"]))
+    except Exception as e:
+        if re.search("(No such file|not found)", str(e)):
+            set_progress(row["kb2doc_id"], -1, "Can not find file <%s>"%row["doc_name"])
+        else:
+            set_progress(row["kb2doc_id"], -1, f"Internal system error: %s"%str(e).replace("'", ""))
+        return []
+
+    print(row["doc_name"], obj)
+    if not obj.text_chunks and not obj.table_chunks: 
+        set_progress(row["kb2doc_id"], 1, "Nothing added! Mostly, file type unsupported yet.")
         return  []
 
-    set_progress(row["did"], random.randint(20, 60)/100.)
+    set_progress(row["kb2doc_id"], random.randint(20, 60)/100., "Finished slicing files. Start to embedding the content.")
 
+    doc = {
+        "doc_id": row["did"],
+        "kb_id": [str(row["kb_id"])],
+        "title_tks": huqie.qie(os.path.split(row["location"])[-1]),
+        "updated_at": str(row["updated_at"]).replace("T", " ")[:19]
+    }
     output_buffer = BytesIO()
     docs = []
     md5 = hashlib.md5()
@@ -97,12 +144,11 @@ def build(row):
         md5.update((txt + str(d["doc_id"])).encode("utf-8"))
         d["_id"] = md5.hexdigest()
         d["content_ltks"] = huqie.qie(txt)
-        d["docnm_kwd"] = rmSpace(d["docnm_tks"])
         if not img:
             docs.append(d)
             continue
         img.save(output_buffer, format='JPEG')
-        d["img_bin"] = base64.b64encode(output_buffer.getvalue())
+        d["img_bin"] = str(output_buffer.getvalue())
         docs.append(d)
 
     for arr, img in obj.table_chunks:
@@ -115,9 +161,11 @@ def build(row):
                 docs.append(d)
                 continue
             img.save(output_buffer, format='JPEG')
-            d["img_bin"] = base64.b64encode(output_buffer.getvalue())
+            MINIO.put("{}-{}".format(row["uid"], row["kb_id"]), d["_id"],
+                      output_buffer.getvalue())
+            d["img_id"] = "{}-{}".format(row["uid"], row["kb_id"])
             docs.append(d)
-    set_progress(row["did"], random.randint(60, 70)/100., "Finished slicing. Start to embedding the content.")
+    set_progress(row["kb2doc_id"], random.randint(60, 70)/100., "Continue embedding the content.")
 
     return docs
 
@@ -127,7 +175,7 @@ def index_name(uid):return f"docgpt_{uid}"
 def init_kb(row):
     idxnm = index_name(row["uid"])
     if ES.indexExist(idxnm): return
-    return ES.createIdx(idxnm, json.load(open("res/mapping.json", "r")))
+    return ES.createIdx(idxnm, json.load(open("conf/mapping.json", "r")))
 
 
 model = None
@@ -138,27 +186,60 @@ def embedding(docs):
     vects = 0.1 * tts + 0.9 * cnts
     assert len(vects) == len(docs)
     for i,d in enumerate(docs):d["q_vec"] = vects[i].tolist()
-    for d in docs:
-        set_progress(d["doc_id"], random.randint(70, 95)/100., 
-                     "Finished embedding! Start to build index!")
+
+
+def rm_doc_from_kb(df):
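+    # Drop this kb_id from each doc's ES records, then delete the kb2_doc rows.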
+    if len(df) == 0: return
+    for _,r in df.iterrows():
+        ES.updateScriptByQuery(Q("term", doc_id=r["did"]), 
+                               scripts="""
+                               if(ctx._source.kb_id.contains('%s'))
+                                 ctx._source.kb_id.remove(
+                                     ctx._source.kb_id.indexOf('%s')
+                               );
+                                """%(str(r["kb_id"]),str(r["kb_id"])),
+                               idxnm = index_name(r["uid"])
+                              )
+    sql = """
+    delete from kb2_doc where id in (%s)
+    """%",".join([str(i) for i in df["kb2doc_id"]])
+    PG.update(sql)
 
 
 def main(comm, mod):
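+    # Load the embedding model once per worker; fp16 only when CUDA is available.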
+    global model
+    from FlagEmbedding import FlagModel
+    model = FlagModel('/opt/home/kevinhu/data/bge-large-zh-v1.5/',
+                      query_instruction_for_retrieval="为这个句子生成表示以用于检索相关文章:",
+                      use_fp16=torch.cuda.is_available())
     tm_fnm = f"res/{comm}-{mod}.tm"
-    tmf = open(tm_fnm, "a+")
     tm = findMaxDt(tm_fnm)
-    rows, tm = collect(comm, mod, tm)
-    for r in rows:
-        if r["is_deleted"]: 
-            ES.deleteByQuery(Q("term", dock_id=r["did"]), index_name(r["uid"]))
-            continue
+    rows = collect(comm, mod, tm)
+    if len(rows) == 0: return
 
+    rm_doc_from_kb(rows.loc[rows.is_deleted == True])
+    rows = rows.loc[rows.is_deleted == False].reset_index(drop=True)
+    if len(rows) == 0: return
+    tmf = open(tm_fnm, "a+")
+    for _, r in rows.iterrows():
         cks = build(r)
+        if not cks:
+            tmf.write(str(r["updated_at"]) + "\n")
+            continue
         ## TODO: exception handler
         ## set_progress(r["did"], -1, "ERROR: ")
         embedding(cks)
-        if cks: init_kb(r)
-        ES.bulk(cks, index_name(r["uid"]))
+
+        set_progress(r["kb2doc_id"], random.randint(70, 95)/100., 
+                     "Finished embedding! Start to build index!")
+        init_kb(r)
+        es_r = ES.bulk(cks, index_name(r["uid"]))
+        if es_r:
+            set_progress(r["kb2doc_id"], -1, "Index failure!")
+            print(es_r)
+        else: set_progress(r["kb2doc_id"], 1., "Done!")
         tmf.write(str(r["updated_at"]) + "\n")
     tmf.close()
 
@@ -166,6 +246,5 @@ def main(comm, mod):
 if __name__ == "__main__":
     from mpi4py import MPI
     comm = MPI.COMM_WORLD
-    rank = comm.Get_rank()
-    main(comm, rank)
+    main(comm.Get_size(), comm.Get_rank())
 
diff --git a/python/util/config.py b/python/util/config.py
index 78429e570ab76dfd94434e44333a33e5c84ce763..372166d12bd9a39588c6c1275f198dfbd3e8fca4 100755
--- a/python/util/config.py
+++ b/python/util/config.py
@@ -14,9 +14,9 @@ class Config:
         self.env = env
         if env == "spark":CF.read("./cv.cnf")
 
-    def get(self, key):
+    def get(self, key, default=None):
         global CF
-        return CF.get(self.env, key)
+        return CF[self.env].get(key, default)
 
 def init(env):
     return Config(env)
diff --git a/python/util/db_conn.py b/python/util/db_conn.py
index ca9e4baedf54f8e10bebf4eb02da5ce94171994e..3c72ee3241df88a743eee52e889c447a8422bd9e 100644
--- a/python/util/db_conn.py
+++ b/python/util/db_conn.py
@@ -49,7 +49,7 @@ class Postgres(object):
                 cur = self.conn.cursor()
                 cur.execute(sql)
                 updated_rows = cur.rowcount
+<<<<<<< HEAD
+                self.conn.commit()
+=======
                 conn.commit()
+>>>>>>> upstream/main
                 cur.close()
                 return updated_rows
             except Exception as e:
diff --git a/python/util/es_conn.py b/python/util/es_conn.py
index ea917a7238b9e3af3fb00f46f2d30e34a08e7dd5..84c60880a00378b92445f578a227196eb4e0b7f4 100755
--- a/python/util/es_conn.py
+++ b/python/util/es_conn.py
@@ -5,10 +5,10 @@ import time
 import copy
 import elasticsearch
 from elasticsearch import Elasticsearch
-from elasticsearch_dsl import UpdateByQuery, Search, Index
+from elasticsearch_dsl import UpdateByQuery, Search, Index, Q
 from util import config
 
-print("Elasticsearch version: ", elasticsearch.__version__)
+logging.info("Elasticsearch version: ", elasticsearch.__version__)
 
 
 def instance(env):
@@ -20,7 +20,7 @@ def instance(env):
         timeout=600
     )
 
-    print("ES: ", ES_DRESS, ES.info())
+    logging.info("ES: ", ES_DRESS, ES.info())
 
     return ES
 
@@ -31,7 +31,7 @@ class HuEs:
         self.info = {}
         self.config = config.init(env)
         self.conn()
-        self.idxnm = self.config.get("idx_nm","")
+        self.idxnm = self.config.get("idx_nm", "")
         if not self.es.ping():
             raise Exception("Can't connect to ES cluster")
 
@@ -46,6 +46,7 @@ class HuEs:
                     break
             except Exception as e:
                 logging.error("Fail to connect to es: " + str(e))
+                time.sleep(1)
 
     def version(self):
         v = self.info.get("version", {"number": "5.6"})
@@ -121,7 +122,6 @@ class HuEs:
             acts.append(
                 {"update": {"_id": id, "_index": ids[id]["_index"]}, "retry_on_conflict": 100})
             acts.append({"doc": d, "doc_as_upsert": "true"})
-            logging.info("bulk upsert: %s" % id)
 
         res = []
         for _ in range(100):
@@ -148,7 +148,6 @@ class HuEs:
                 return res
             except Exception as e:
                 logging.warn("Fail to bulk: " + str(e))
-                print(e)
                 if re.search(r"(Timeout|time out)", str(e), re.IGNORECASE):
                     time.sleep(3)
                     continue
@@ -229,7 +228,7 @@ class HuEs:
         return False
 
     def search(self, q, idxnm=None, src=False, timeout="2s"):
-        print(json.dumps(q, ensure_ascii=False))
+        if not isinstance(q, dict): q = Search().query(q).to_dict()
         for i in range(3):
             try:
                 res = self.es.search(index=(self.idxnm if not idxnm else idxnm),
@@ -271,9 +270,33 @@
                               str(e) + "【Q】:" + str(q.to_dict()))
                 if str(e).find("Timeout") > 0 or str(e).find("Conflict") > 0:
                     continue
+                self.conn()
 
         return False
 
+
+    def updateScriptByQuery(self, q, scripts, idxnm=None):
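+        # Update-by-query with a painless script; retried on timeout/conflict,
+        # reconnecting to the cluster on other failures.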
+        ubq = UpdateByQuery(index=self.idxnm if not idxnm else idxnm).using(self.es).query(q)
+        ubq = ubq.script(source=scripts)
+        ubq = ubq.params(refresh=True)
+        ubq = ubq.params(slices=5)
+        ubq = ubq.params(conflicts="proceed")
+        for i in range(3):
+            try:
+                r = ubq.execute()
+                return True
+            except Exception as e:
+                logging.error("ES updateByQuery exception: " +
+                              str(e) + "【Q】:" + str(q.to_dict()))
+                if str(e).find("Timeout") > 0 or str(e).find("Conflict") > 0:
+                    continue
+                self.conn()
+
+        return False
+
+
     def deleteByQuery(self, query, idxnm=""):
         for i in range(3):
             try:
@@ -307,7 +328,6 @@ class HuEs:
                                        routing=routing, refresh=False)  # , doc_type="_doc")
                 return True
             except Exception as e:
-                print(e)
                 logging.error("ES update exception: " + str(e) + " id:" + str(id) + ", version:" + str(self.version()) +
                               json.dumps(script, ensure_ascii=False))
                 if str(e).find("Timeout") > 0:
diff --git a/python/util/minio_conn.py b/python/util/minio_conn.py
new file mode 100644
index 0000000000000000000000000000000000000000..a8e20bac7f96a8d234a3e417855bb28445504c15
--- /dev/null
+++ b/python/util/minio_conn.py
@@ -0,0 +1,76 @@
+import logging
+import time
+from util import config
+from minio import Minio
+from io import BytesIO
+
+class HuMinio(object):
+    def __init__(self, env):
+        self.config = config.init(env)
+        self.conn = None
+        self.__open__()
+
+    def __open__(self):
+        try:
+            if self.conn: self.__close__()
+        except Exception as e:
+            pass
+
+        try:
+            self.conn = Minio(self.config.get("minio_host"),
+                              access_key=self.config.get("minio_usr"),
+                              secret_key=self.config.get("minio_pwd"),
+                              secure=False
+                             )
+        except Exception as e:
+            logging.error("Fail to connect %s "%self.config.get("minio_host") + str(e))
+
+
+    def __close__(self):
+        del self.conn
+        self.conn = None
+
+
+    def put(self, bucket, fnm, binary):
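+        # Lazily create the bucket, then upload; on failure reconnect
+        # and retry (up to 10 times).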
+        for _ in range(10):
+            try:
+                if not self.conn.bucket_exists(bucket):
+                    self.conn.make_bucket(bucket)
+
+                r = self.conn.put_object(bucket, fnm, 
+                                         BytesIO(binary),
+                                         len(binary)
+                                        )
+                return r
+            except Exception as e:
+                logging.error(f"Fail put {bucket}/{fnm}: "+str(e))
+                self.__open__()
+                time.sleep(1)
+
+
+    def get(self, bucket, fnm):
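+        # Returns the object's bytes, or None after 10 failed attempts.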
+        for _ in range(10):
+            try:
+                r = self.conn.get_object(bucket, fnm)
+                return r.read()
+            except Exception as e:
+                logging.error(f"Fail get {bucket}/{fnm}: "+str(e))
+                self.__open__()
+                time.sleep(1)
+        return 
+
+if __name__ == "__main__":
+    conn = HuMinio("infiniflow")
+    fnm = "/opt/home/kevinhu/docgpt/upload/13/11-408.jpg"
+    from PIL import Image
+    img = Image.open(fnm)
+    buff = BytesIO()
+    img.save(buff, format='JPEG') 
+    print(conn.put("test", "11-408.jpg", buff.getvalue()))
+    bts = conn.get("test", "11-408.jpg")
+    img = Image.open(BytesIO(bts))
+    img.save("test.jpg")
+
diff --git a/src/api/dialog_info.rs b/src/api/dialog_info.rs
index e45dc9bb38059ab4baf7cb98eef925239dccc876..fca98c7c89a4648bf3bb0a132faa35cbfedcf5b8 100644
--- a/src/api/dialog_info.rs
+++ b/src/api/dialog_info.rs
@@ -1,5 +1,8 @@
 use std::collections::HashMap;
-use actix_web::{get, HttpResponse, post, web};
+use actix_web::{HttpResponse, post, web};
+use serde::Deserialize;
+use serde_json::Value;
+use serde_json::json;
 use crate::api::JsonResponse;
 use crate::AppState;
 use crate::entity::dialog_info;
@@ -7,13 +10,45 @@ use crate::errors::AppError;
 use crate::service::dialog_info::Query;
 use crate::service::dialog_info::Mutation;
 
-#[get("/v1.0/dialogs")]
-async fn list(model: web::Json<dialog_info::Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
-    let dialogs = Query::find_dialog_infos_by_uid(&data.conn, model.uid).await?;
-
+#[derive(Debug, Deserialize)]
+pub struct ListParams {
+    pub uid: i64,
+    pub dialog_id: Option<i64>
+}
+#[post("/v1.0/dialogs")]
+async fn list(params: web::Json<ListParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
     let mut result = HashMap::new();
-    result.insert("dialogs", dialogs);
+    if let Some(dia_id) = params.dialog_id{
+        let dia = Query::find_dialog_info_by_id(&data.conn, dia_id).await?.unwrap();
+        let kb = crate::service::kb_info::Query::find_kb_info_by_id(&data.conn, dia.kb_id).await?.unwrap();
+        print!("{:?}", dia.history);
+        let hist:Value = serde_json::from_str(&dia.history)?;
+        let detail = json!({
+            "dialog_id": dia_id,
+            "dialog_name": dia.dialog_name.to_owned(),
+            "created_at": dia.created_at.to_string().to_owned(),
+            "updated_at": dia.updated_at.to_string().to_owned(),
+            "history": hist,
+            "kb_info": kb
+        });
 
+        result.insert("dialogs", vec![detail]);
+    }
+    else{
+        let mut dias = Vec::<Value>::new();
+        for dia in Query::find_dialog_infos_by_uid(&data.conn, params.uid).await?{
+            let kb = crate::service::kb_info::Query::find_kb_info_by_id(&data.conn, dia.kb_id).await?.unwrap();
+            let hist:Value = serde_json::from_str(&dia.history)?;
+            dias.push(json!({
+                "dialog_id": dia.dialog_id,
+                "dialog_name": dia.dialog_name.to_owned(),
+                "created_at": dia.created_at.to_string().to_owned(),
+                "updated_at": dia.updated_at.to_string().to_owned(),
+                "history": hist,
+                "kb_info": kb
+            }));
+        }
+        result.insert("dialogs", dias);
+    }
     let json_response = JsonResponse {
         code: 200,
         err: "".to_owned(),
@@ -25,17 +61,19 @@ async fn list(model: web::Json<dialog_info::Model>, data: web::Data<AppState>) -
         .body(serde_json::to_string(&json_response)?))
 }
 
-#[get("/v1.0/dialog")]
-async fn detail(model: web::Json<dialog_info::Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
-    let dialogs = Query::find_dialog_info_by_id(&data.conn, model.dialog_id).await?;
-
-    let mut result = HashMap::new();
-    result.insert("dialogs", dialogs);
+#[derive(Debug, Deserialize)]
+pub struct RmParams {
+    pub uid: i64,
+    pub dialog_id: i64
+}
+#[post("/v1.0/delete_dialog")]
+async fn delete(params: web::Json<RmParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+    let _ = Mutation::delete_dialog_info(&data.conn, params.dialog_id).await?;
 
     let json_response = JsonResponse {
         code: 200,
         err: "".to_owned(),
-        data: result,
+        data: (),
     };
 
     Ok(HttpResponse::Ok()
@@ -43,14 +81,30 @@ async fn detail(model: web::Json<dialog_info::Model>, data: web::Data<AppState>)
         .body(serde_json::to_string(&json_response)?))
 }
 
-#[post("/v1.0/delete_dialog")]
-async fn delete(model: web::Json<dialog_info::Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
-    let _ = Mutation::delete_dialog_info(&data.conn, model.dialog_id).await?;
+#[derive(Debug, Deserialize)]
+pub struct CreateParams {
+    pub uid: i64,
+    pub dialog_id: Option<i64>,
+    pub kb_id: i64,
+    pub name: String
+}
+#[post("/v1.0/create_dialog")]
+async fn create(param: web::Json<CreateParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+    let mut result = HashMap::new();
+    if let Some(dia_id) = param.dialog_id {
+        result.insert("dialog_id", dia_id);
+        let dia = Query::find_dialog_info_by_id(&data.conn, dia_id).await?;
+        let _ = Mutation::update_dialog_info_by_id(&data.conn, dia_id, &param.name, &dia.unwrap().history).await?;
+    }
+    else{
+        let dia = Mutation::create_dialog_info(&data.conn, param.uid, param.kb_id, &param.name).await?;
+        result.insert("dialog_id", dia.dialog_id.unwrap());
+    }
 
     let json_response = JsonResponse {
         code: 200,
         err: "".to_owned(),
-        data: (),
+        data: result,
     };
 
     Ok(HttpResponse::Ok()
@@ -58,19 +112,31 @@ async fn delete(model: web::Json<dialog_info::Model>, data: web::Data<AppState>)
         .body(serde_json::to_string(&json_response)?))
 }
 
-#[post("/v1.0/create_kb")]
-async fn create(model: web::Json<dialog_info::Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
-    let model = Mutation::create_dialog_info(&data.conn, model.into_inner()).await?;
-
-    let mut result = HashMap::new();
-    result.insert("dialog_id", model.dialog_id.unwrap());
 
-    let json_response = JsonResponse {
+#[derive(Debug, Deserialize)]
+pub struct UpdateHistoryParams {
+    pub uid: i64,
+    pub dialog_id: i64,
+    pub history: Value
+}
+#[post("/v1.0/update_history")]
+async fn update_history(param: web::Json<UpdateHistoryParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+    let mut json_response = JsonResponse {
         code: 200,
         err: "".to_owned(),
-        data: result,
+        data: (),
     };
 
+    if let Some(dia) = Query::find_dialog_info_by_id(&data.conn, param.dialog_id).await?{
+        let _ = Mutation::update_dialog_info_by_id(&data.conn, param.dialog_id, &dia.dialog_name, 
+                    &param.history.to_string()).await?;
+    }
+    else{
+        json_response.code = 500;
+        json_response.err = "Can't find dialog data!".to_owned();
+    }
+
     Ok(HttpResponse::Ok()
         .content_type("application/json")
         .body(serde_json::to_string(&json_response)?))
diff --git a/src/api/doc_info.rs b/src/api/doc_info.rs
index df7a2d923d15c49fad051b55856b2587994569eb..44102bd4c1a4cecabebba873e2066ec592785859 100644
--- a/src/api/doc_info.rs
+++ b/src/api/doc_info.rs
@@ -1,12 +1,8 @@
 use std::collections::HashMap;
 use std::io::Write;
-use std::slice::Chunks;
-//use actix_multipart::{Multipart, MultipartError, Field};
 use actix_multipart_extract::{File, Multipart, MultipartForm};
 use actix_web::{get, HttpResponse, post, web};
-use actix_web::web::Bytes;
-use chrono::Local;
-use futures_util::StreamExt;
+use chrono::{Utc, FixedOffset};
 use sea_orm::DbConn;
 use crate::api::JsonResponse;
 use crate::AppState;
@@ -15,14 +11,18 @@ use crate::errors::AppError;
 use crate::service::doc_info::{Mutation, Query};
 use serde::Deserialize;
 
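+// Timestamps are stored with a fixed UTC+8 offset.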
+fn now()->chrono::DateTime<FixedOffset>{
+    Utc::now().with_timezone(&FixedOffset::east_opt(3600*8).unwrap())
+}
 
 #[derive(Debug, Deserialize)]
-pub struct Params {
+pub struct ListParams {
     pub uid: i64,
     pub filter: FilterParams,
     pub sortby: String,
-    pub page: u64,
-    pub per_page: u64,
+    pub page: Option<u32>,
+    pub per_page: Option<u32>,
 }
 
 #[derive(Debug, Deserialize)]
@@ -33,14 +32,8 @@ pub struct FilterParams {
     pub kb_id: Option<i64>,
 }
 
-#[derive(Debug, Deserialize)]
-pub struct MvParams {
-    pub dids: Vec<i64>,
-    pub dest_did: i64,
-}
-
-#[get("/v1.0/docs")]
-async fn list(params: web::Json<Params>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+#[post("/v1.0/docs")]
+async fn list(params: web::Json<ListParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
     let docs = Query::find_doc_infos_by_params(&data.conn, params.into_inner())
         .await?;
 
@@ -69,21 +62,22 @@ pub struct UploadForm {
 #[post("/v1.0/upload")]
 async fn upload(payload: Multipart<UploadForm>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
     let uid = payload.uid;
-    async fn add_number_to_filename(file_name: String, conn:&DbConn, uid:i64) -> String {
+    async fn add_number_to_filename(file_name: String, conn:&DbConn, uid:i64, parent_id:i64) -> String {
         let mut i = 0;
         let mut new_file_name = file_name.to_string();
         let arr: Vec<&str> = file_name.split(".").collect();
         let suffix = String::from(arr[arr.len()-1]);
         let preffix = arr[..arr.len()-1].join(".");
-        let mut docs = Query::find_doc_infos_by_name(conn, uid, new_file_name.clone()).await.unwrap();
+        let mut docs = Query::find_doc_infos_by_name(conn, uid, &new_file_name, Some(parent_id)).await.unwrap();
         while docs.len()>0 {
             i += 1;
             new_file_name = format!("{}_{}.{}", preffix, i, suffix);
-            docs = Query::find_doc_infos_by_name(conn, uid, new_file_name.clone()).await.unwrap();
+            docs = Query::find_doc_infos_by_name(conn, uid, &new_file_name, Some(parent_id)).await.unwrap();
         }
         new_file_name
     }
-    let fnm = add_number_to_filename(payload.file_field.name.clone(), &data.conn, uid).await;
+    let fnm = add_number_to_filename(payload.file_field.name.clone(), &data.conn, uid, payload.did).await;
 
     std::fs::create_dir_all(format!("./upload/{}/", uid));
     let filepath = format!("./upload/{}/{}-{}", payload.uid, payload.did, fnm.clone());
@@ -95,13 +88,11 @@ async fn upload(payload: Multipart<UploadForm>, data: web::Data<AppState>) -> Re
         uid:  uid,
         doc_name: fnm,
         size: payload.file_field.bytes.len() as i64,
-        kb_infos: Vec::new(),
-        kb_progress: 0.0,
-        kb_progress_msg: "".to_string(),
         location: filepath,
         r#type: "doc".to_string(),
-        created_at: Local::now().date_naive(),
-        updated_at: Local::now().date_naive(),
+        created_at: now(),
+        updated_at: now(),
+        is_deleted:Default::default(),
     }).await?;
 
     let _ = Mutation::place_doc(&data.conn, payload.did, doc.did.unwrap()).await?;
@@ -109,11 +100,14 @@ async fn upload(payload: Multipart<UploadForm>, data: web::Data<AppState>) -> Re
     Ok(HttpResponse::Ok().body("File uploaded successfully"))
 }
 
+#[derive(Deserialize, Debug)]
+pub struct RmDocsParam {
+    uid: i64, 
+    dids: Vec<i64>
+}
 #[post("/v1.0/delete_docs")]
-async fn delete(doc_ids: web::Json<Vec<i64>>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
-    for doc_id in doc_ids.iter() {
-        let _ = Mutation::delete_doc_info(&data.conn, *doc_id).await?;
-    }
+async fn delete(params: web::Json<RmDocsParam>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+    let _ = Mutation::delete_doc_info(&data.conn, &params.dids).await?;
 
     let json_response = JsonResponse {
         code: 200,
@@ -126,6 +120,13 @@ async fn delete(doc_ids: web::Json<Vec<i64>>, data: web::Data<AppState>) -> Resu
         .body(serde_json::to_string(&json_response)?))
 }
 
+#[derive(Debug, Deserialize)]
+pub struct MvParams {
+    pub uid:i64,
+    pub dids: Vec<i64>,
+    pub dest_did: i64,
+}
+
 #[post("/v1.0/mv_docs")]
 async fn mv(params: web::Json<MvParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
     Mutation::mv_doc_info(&data.conn, params.dest_did, &params.dids).await?;
@@ -140,3 +141,61 @@ async fn mv(params: web::Json<MvParams>, data: web::Data<AppState>) -> Result<Ht
         .content_type("application/json")
         .body(serde_json::to_string(&json_response)?))
 }
+
+#[derive(Debug, Deserialize)]
+pub struct NewFoldParams {
+    pub uid: i64,
+    pub parent_id: i64,
+    pub name: String
+}
+
+#[post("/v1.0/new_folder")]
+async fn new_folder(params: web::Json<NewFoldParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+    let doc = Mutation::create_doc_info(&data.conn, Model {
+        did:Default::default(),
+        uid:  params.uid,
+        doc_name: params.name.to_string(),
+        size:0,
+        r#type: "folder".to_string(),
+        location: "".to_owned(),
+        created_at: now(),
+        updated_at: now(),
+        is_deleted:Default::default(),
+    }).await?;
+    let _ = Mutation::place_doc(&data.conn, params.parent_id, doc.did.unwrap()).await?;
+
+    Ok(HttpResponse::Ok().body("Folder created successfully"))
+}
+
+#[derive(Debug, Deserialize)]
+pub struct RenameParams {
+    pub uid: i64,
+    pub did: i64,
+    pub name: String
+}
+
+#[post("/v1.0/rename")]
+async fn rename(params: web::Json<RenameParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+    let docs = Query::find_doc_infos_by_name(&data.conn, params.uid, &params.name, None).await?;
+    if docs.len()>0{
+        let json_response = JsonResponse {
+            code: 500,
+            err: "Name duplicated!".to_owned(),
+            data: (),
+        };
+        return Ok(HttpResponse::Ok()
+            .content_type("application/json")
+            .body(serde_json::to_string(&json_response)?));
+    }
+    let doc = Mutation::rename(&data.conn, params.did, &params.name).await?;
+
+    let json_response = JsonResponse {
+        code: 200,
+        err: "".to_owned(),
+        data: doc,
+    };
+
+    Ok(HttpResponse::Ok()
+        .content_type("application/json")
+        .body(serde_json::to_string(&json_response)?))
+}
diff --git a/src/api/kb_info.rs b/src/api/kb_info.rs
index 80f75e01f55150bc1b6103691891b6ba306d3c1d..5452be021bf706e4b42adb746027cf166d58234a 100644
--- a/src/api/kb_info.rs
+++ b/src/api/kb_info.rs
@@ -60,6 +60,20 @@ async fn add_docs_to_kb(param: web::Json<AddDocs2KbParams>, data: web::Data<AppS
         .body(serde_json::to_string(&json_response)?))
 }
 
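+// Inverse of add_docs_to_kb: detach the given documents from the knowledge base.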
+#[post("/v1.0/anti_kb_docs")]
+async fn anti_kb_docs(param: web::Json<AddDocs2KbParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+    let _ = Mutation::remove_docs(&data.conn, param.dids.to_owned(), Some(param.kb_id)).await?;
+
+    let json_response = JsonResponse {
+        code: 200,
+        err: "".to_owned(),
+        data: (),
+    };
+
+    Ok(HttpResponse::Ok()
+        .content_type("application/json")
+        .body(serde_json::to_string(&json_response)?))
+}
 #[get("/v1.0/kbs")]
 async fn list(model: web::Json<kb_info::Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
     let kbs = Query::find_kb_infos_by_uid(&data.conn, model.uid).await?;
@@ -91,4 +105,28 @@ async fn delete(model: web::Json<kb_info::Model>, data: web::Data<AppState>) ->
     Ok(HttpResponse::Ok()
         .content_type("application/json")
         .body(serde_json::to_string(&json_response)?))
+}
+
+#[derive(Clone, Debug, Serialize, Deserialize)]
+pub struct DocIdsParams {
+    pub uid: i64,
+    pub dids: Vec<i64>
+}
+
+#[post("/v1.0/all_relevents")]
+async fn all_relevents(params: web::Json<DocIdsParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+    let dids = crate::service::doc_info::Query::all_descendent_ids(&data.conn, &params.dids).await?;
+    let mut result = HashMap::new();
+    let kbs = Query::find_kb_by_docs(&data.conn, dids).await?;
+    result.insert("kbs", kbs);
+    let json_response = JsonResponse {
+        code: 200,
+        err: "".to_owned(),
+        data: result,
+    };
+
+    Ok(HttpResponse::Ok()
+      .content_type("application/json")
+      .body(serde_json::to_string(&json_response)?))
+
 }
\ No newline at end of file
diff --git a/src/api/tag_info.rs b/src/api/tag_info.rs
index 28f09b19bb7585a41e2575b1789f8276a14c8589..23da6405b469f950fcac2d0309f0ce869704a672 100644
--- a/src/api/tag_info.rs
+++ b/src/api/tag_info.rs
@@ -1,6 +1,7 @@
 use std::collections::HashMap;
 use actix_web::{get, HttpResponse, post, web};
 use actix_web_httpauth::middleware::HttpAuthentication;
+use serde::Deserialize;
 use crate::validator;
 use crate::api::JsonResponse;
 use crate::AppState;
@@ -8,6 +9,11 @@ use crate::entity::tag_info;
 use crate::errors::AppError;
 use crate::service::tag_info::{Mutation, Query};
 
+#[derive(Debug, Deserialize)]
+pub struct TagListParams {
+    pub uid: i64
+}
+
 #[post("/v1.0/create_tag")]
 async fn create(model: web::Json<tag_info::Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
     let model = Mutation::create_tag(&data.conn, model.into_inner()).await?;
@@ -41,9 +47,12 @@ async fn delete(model: web::Json<tag_info::Model>, data: web::Data<AppState>) ->
         .body(serde_json::to_string(&json_response)?))
 }
 
-#[get("/v1.0/tags", wrap = "HttpAuthentication::bearer(validator)")]
-async fn list(data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
-    let tags = Query::find_tag_infos(&data.conn).await?;
+
+//#[get("/v1.0/tags", wrap = "HttpAuthentication::bearer(validator)")]
+
+#[post("/v1.0/tags")]
+async fn list(param: web::Json<TagListParams>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+    let tags = Query::find_tags_by_uid(param.uid, &data.conn).await?;
 
     let mut result = HashMap::new();
     result.insert("tags", tags);
diff --git a/src/api/user_info.rs b/src/api/user_info.rs
index 625a243780d9213125693f58404b94055c65a443..51c1422bcfd190b927794be28ef45206e4751d29 100644
--- a/src/api/user_info.rs
+++ b/src/api/user_info.rs
@@ -1,10 +1,13 @@
+use std::collections::HashMap;
+
 use actix_identity::Identity;
-use actix_web::{get, HttpResponse, post, web};
+use actix_web::{HttpResponse, post, web};
 use serde::{Deserialize, Serialize};
 use crate::api::JsonResponse;
 use crate::AppState;
 use crate::entity::user_info::Model;
 use crate::errors::{AppError, UserError};
+use crate::service::user_info::Mutation;
 use crate::service::user_info::Query;
 
 pub(crate) fn create_auth_token(user: &Model) -> u64 {
@@ -32,6 +35,8 @@
 ) -> Result<HttpResponse, AppError> {
     match Query::login(&data.conn, &input.email, &input.password).await? {
         Some(user) => {
+            let _ = Mutation::update_login_status(user.uid,&data.conn).await?;
             let token = create_auth_token(&user).to_string();
 
             identity.remember(token.clone());
@@ -49,4 +53,34 @@ async fn login(
         }
         None => Err(UserError::LoginFailed.into())
     }
+}
+
+#[post("/v1.0/register")]
+async fn register(model: web::Json<Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+    let mut result = HashMap::new();
+    let usr = Mutation::create_user(&data.conn, &model).await?;
+    result.insert("uid", usr.uid.unwrap());
+    let json_response = JsonResponse {
+        code: 200,
+        err: "".to_owned(),
+        data: result,
+    };
+
+    Ok(HttpResponse::Ok()
+        .content_type("application/json")
+        .body(serde_json::to_string(&json_response)?))
+}
+
+#[post("/v1.0/setting")]
+async fn setting(model: web::Json<Model>, data: web::Data<AppState>) -> Result<HttpResponse, AppError> {
+    let _ = Mutation::update_user_by_id(&data.conn, &model).await?;
+    let json_response = JsonResponse {
+        code: 200,
+        err: "".to_owned(),
+        data: (),
+    };
+
+    Ok(HttpResponse::Ok()
+        .content_type("application/json")
+        .body(serde_json::to_string(&json_response)?))
 }
\ No newline at end of file
diff --git a/src/entity/dialog_info.rs b/src/entity/dialog_info.rs
index a97999b2ecd99dd477c459a4657b6972a9ddd3ef..8db6e24e563499d94b8c2c6569eca492f0868731 100644
--- a/src/entity/dialog_info.rs
+++ b/src/entity/dialog_info.rs
@@ -1,3 +1,4 @@
+use chrono::{DateTime, FixedOffset};
 use sea_orm::entity::prelude::*;
 use serde::{Deserialize, Serialize};
 
@@ -8,13 +9,17 @@ pub struct Model {
     pub dialog_id: i64,
     #[sea_orm(index)]
     pub uid: i64,
+    #[serde(skip_deserializing)]
+    pub kb_id: i64,
     pub dialog_name: String,
     pub history: String,
 
     #[serde(skip_deserializing)]
-    pub created_at: Date,
+    pub created_at: DateTime<FixedOffset>,
+    #[serde(skip_deserializing)]
+    pub updated_at: DateTime<FixedOffset>,
     #[serde(skip_deserializing)]
-    pub updated_at: Date,
+    pub is_deleted: bool
 }
 
 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
diff --git a/src/entity/doc_info.rs b/src/entity/doc_info.rs
index d2ccd411041b48c8c663f873b48b44e87b469d68..7492a5d442f4eb218cf71ab3bf3d6d980d38df0d 100644
--- a/src/entity/doc_info.rs
+++ b/src/entity/doc_info.rs
@@ -1,6 +1,7 @@
 use sea_orm::entity::prelude::*;
 use serde::{Deserialize, Serialize};
 use crate::entity::kb_info;
+use chrono::{DateTime, FixedOffset};
 
 #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Deserialize, Serialize)]
 #[sea_orm(table_name = "doc_info")]
@@ -13,16 +14,14 @@ pub struct Model {
     pub size: i64,
     #[sea_orm(column_name = "type")]
     pub r#type: String,
-    pub kb_progress: f32,
-    pub kb_progress_msg: String,
+    #[serde(skip_deserializing)]
     pub location: String,
-    #[sea_orm(ignore)]
-    pub kb_infos: Vec<kb_info::Model>,
-
     #[serde(skip_deserializing)]
-    pub created_at: Date,
+    pub created_at: DateTime<FixedOffset>,
+    #[serde(skip_deserializing)]
+    pub updated_at: DateTime<FixedOffset>,
     #[serde(skip_deserializing)]
-    pub updated_at: Date,
+    pub is_deleted: bool
 }
 
 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
@@ -48,7 +47,7 @@ impl Related<super::kb_info::Entity> for Entity {
     }
 }
 
-impl Related<Entity> for Entity {
+impl Related<super::doc2_doc::Entity> for Entity {
     fn to() -> RelationDef {
         super::doc2_doc::Relation::Parent.def()
     }
diff --git a/src/entity/kb2_doc.rs b/src/entity/kb2_doc.rs
index 1d82756f0270a3812949b12a7414bd07d6040b19..0c0522fc35e12d460a000d019231b91e6575059c 100644
--- a/src/entity/kb2_doc.rs
+++ b/src/entity/kb2_doc.rs
@@ -1,7 +1,8 @@
 use sea_orm::entity::prelude::*;
 use serde::{Deserialize, Serialize};
+use chrono::{DateTime, FixedOffset};
 
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel, Deserialize, Serialize)]
+#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Deserialize, Serialize)]
 #[sea_orm(table_name = "kb2_doc")]
 pub struct Model {
     #[sea_orm(primary_key, auto_increment = true)]
@@ -10,6 +11,14 @@ pub struct Model {
     pub kb_id: i64,
     #[sea_orm(index)]
     pub did: i64,
+<<<<<<< HEAD
+    #[serde(skip_deserializing)]
+    pub kb_progress: f32,
+    #[serde(skip_deserializing)]
+    pub kb_progress_msg: String,
+    #[serde(skip_deserializing)]
+    pub updated_at: DateTime<FixedOffset>,
+    #[serde(skip_deserializing)]
+    pub is_deleted: bool,
+=======
+>>>>>>> upstream/main
 }
 
 #[derive(Debug, Clone, Copy, EnumIter)]
diff --git a/src/entity/kb_info.rs b/src/entity/kb_info.rs
index 97ea2bb9d097218edded7c0e152b2aa6f25c757a..97d9ebb86c287c96ec9c390eebe6bc185a1ff355 100644
--- a/src/entity/kb_info.rs
+++ b/src/entity/kb_info.rs
@@ -1,10 +1,12 @@
 use sea_orm::entity::prelude::*;
 use serde::{Deserialize, Serialize};
+use chrono::{DateTime, FixedOffset};
 
 #[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel, Deserialize, Serialize)]
 #[sea_orm(table_name = "kb_info")]
 pub struct Model {
     #[sea_orm(primary_key, auto_increment = false)]
+    #[serde(skip_deserializing)]
     pub kb_id: i64,
     #[sea_orm(index)]
     pub uid: i64,
@@ -12,9 +14,11 @@ pub struct Model {
     pub icon: i16,
 
     #[serde(skip_deserializing)]
-    pub created_at: Date,
+    pub created_at: DateTime<FixedOffset>,
+    #[serde(skip_deserializing)]
+    pub updated_at: DateTime<FixedOffset>,
     #[serde(skip_deserializing)]
-    pub updated_at: Date,
+    pub is_deleted: bool,
 }
 
 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
diff --git a/src/entity/tag_info.rs b/src/entity/tag_info.rs
index b6c1a4ad5c9834aa56ccc9184d4ef4a0caffcb12..107472646680350acd1293517cb59687821dc6fb 100644
--- a/src/entity/tag_info.rs
+++ b/src/entity/tag_info.rs
@@ -1,23 +1,27 @@
 use sea_orm::entity::prelude::*;
 use serde::{Deserialize, Serialize};
+use chrono::{DateTime, FixedOffset};
 
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel, Deserialize, Serialize)]
+#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Deserialize, Serialize)]
 #[sea_orm(table_name = "tag_info")]
 pub struct Model {
     #[sea_orm(primary_key)]
     #[serde(skip_deserializing)]
     pub tid: i64,
+    #[sea_orm(index)]
     pub uid: i64,
     pub tag_name: String,
+<<<<<<< HEAD
+    #[serde(skip_deserializing)]
+    pub regx: String,
+    pub color: i16,
+    pub icon: i16,
+    #[serde(skip_deserializing)]
+    pub folder_id: i64,
+=======
     pub regx: Option<String>,
     pub color: u16,
     pub icon: u16,
     pub dir: Option<String>,
+>>>>>>> upstream/main
 
     #[serde(skip_deserializing)]
-    pub created_at: Date,
+    pub created_at: DateTime<FixedOffset>,
     #[serde(skip_deserializing)]
-    pub updated_at: Date,
+    pub updated_at: DateTime<FixedOffset>,
 }
 
 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
diff --git a/src/entity/user_info.rs b/src/entity/user_info.rs
index b9eb759dd28e21f288cb5233ae4f2489831ca7d9..bf7eea65333e43af32dc2f6ce9729aa94f571121 100644
--- a/src/entity/user_info.rs
+++ b/src/entity/user_info.rs
@@ -1,5 +1,6 @@
 use sea_orm::entity::prelude::*;
 use serde::{Deserialize, Serialize};
+use chrono::{DateTime, FixedOffset};
 
 #[derive(Clone, Debug, PartialEq, Eq, Hash, DeriveEntityModel, Deserialize, Serialize)]
 #[sea_orm(table_name = "user_info")]
@@ -9,19 +10,22 @@ pub struct Model {
     pub uid: i64,
     pub email: String,
     pub nickname: String,
-    pub avatar_url: Option<String>,
-    pub color_schema: String,
+    pub avatar_base64: String,
+    pub color_scheme: String,
     pub list_style: String,
     pub language: String,
     pub password: String,
 
     #[serde(skip_deserializing)]
-    pub created_at: Date,
+    pub last_login_at: DateTime<FixedOffset>,
     #[serde(skip_deserializing)]
-    pub updated_at: Date,
+    pub created_at: DateTime<FixedOffset>,
+    #[serde(skip_deserializing)]
+    pub updated_at: DateTime<FixedOffset>,
 }
 
 #[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
 pub enum Relation {}
 
+
 impl ActiveModelBehavior for ActiveModel {}
\ No newline at end of file
diff --git a/src/main.rs b/src/main.rs
index e1149163e76f4546dc85e608459c9ca8acec6bca..94e9a6ffd071efb97c0e0964c947b150e8db20dc 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -98,16 +98,25 @@ fn init(cfg: &mut web::ServiceConfig) {
     cfg.service(api::kb_info::delete);
     cfg.service(api::kb_info::list);
     cfg.service(api::kb_info::add_docs_to_kb);
+<<<<<<< HEAD
+    cfg.service(api::kb_info::anti_kb_docs);
+    cfg.service(api::kb_info::all_relevents);
+=======
+>>>>>>> upstream/main
 
     cfg.service(api::doc_info::list);
     cfg.service(api::doc_info::delete);
     cfg.service(api::doc_info::mv);
     cfg.service(api::doc_info::upload);
+    cfg.service(api::doc_info::new_folder);
+    cfg.service(api::doc_info::rename);
 
     cfg.service(api::dialog_info::list);
     cfg.service(api::dialog_info::delete);
-    cfg.service(api::dialog_info::detail);
     cfg.service(api::dialog_info::create);
+    cfg.service(api::dialog_info::update_history);
 
     cfg.service(api::user_info::login);
+    cfg.service(api::user_info::register);
+    cfg.service(api::user_info::setting);
 }
\ No newline at end of file
diff --git a/src/service/dialog_info.rs b/src/service/dialog_info.rs
index c4bddc978a75cd862f84337c831d9b0168cc9b66..08a58ed653953d76ca2609974dcb8f9e638bf76e 100644
--- a/src/service/dialog_info.rs
+++ b/src/service/dialog_info.rs
@@ -1,11 +1,16 @@
-use chrono::Local;
-use sea_orm::{ActiveModelTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryOrder};
+use chrono::{Local, FixedOffset, Utc};
+use migration::Expr;
+use sea_orm::{ActiveModelTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryOrder, UpdateResult};
 use sea_orm::ActiveValue::Set;
 use sea_orm::QueryFilter;
 use sea_orm::ColumnTrait;
 use crate::entity::dialog_info;
 use crate::entity::dialog_info::Entity;
 
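+// Timestamps are stored with a fixed UTC+8 offset.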
+fn now()->chrono::DateTime<FixedOffset>{
+    Utc::now().with_timezone(&FixedOffset::east_opt(3600*8).unwrap())
+}
 pub struct Query;
 
 impl Query {
@@ -20,6 +24,7 @@ impl Query {
     pub async fn find_dialog_infos_by_uid(db: &DbConn, uid: i64) -> Result<Vec<dialog_info::Model>, DbErr> {
         Entity::find()
             .filter(dialog_info::Column::Uid.eq(uid))
+            .filter(dialog_info::Column::IsDeleted.eq(false))
             .all(db)
             .await
     }
@@ -45,15 +50,19 @@ pub struct Mutation;
 impl Mutation {
     pub async fn create_dialog_info(
         db: &DbConn,
-        form_data: dialog_info::Model,
+        uid: i64,
+        kb_id: i64,
+        name: &String
     ) -> Result<dialog_info::ActiveModel, DbErr> {
         dialog_info::ActiveModel {
             dialog_id: Default::default(),
-            uid: Set(form_data.uid.to_owned()),
-            dialog_name: Set(form_data.dialog_name.to_owned()),
-            history: Set(form_data.history.to_owned()),
-            created_at: Set(Local::now().date_naive()),
-            updated_at: Set(Local::now().date_naive()),
+            uid: Set(uid),
+            kb_id: Set(kb_id),
+            dialog_name: Set(name.to_owned()),
+            history: Set("".to_owned()),
+            created_at: Set(now()),
+            updated_at: Set(now()),
+            is_deleted: Default::default()
         }
             .save(db)
             .await
@@ -61,35 +70,25 @@ impl Mutation {
 
     pub async fn update_dialog_info_by_id(
         db: &DbConn,
-        id: i64,
-        form_data: dialog_info::Model,
-    ) -> Result<dialog_info::Model, DbErr> {
-        let dialog_info: dialog_info::ActiveModel = Entity::find_by_id(id)
-            .one(db)
-            .await?
-            .ok_or(DbErr::Custom("Cannot find.".to_owned()))
-            .map(Into::into)?;
-
-        dialog_info::ActiveModel {
-            dialog_id: dialog_info.dialog_id,
-            uid: dialog_info.uid,
-            dialog_name: Set(form_data.dialog_name.to_owned()),
-            history: Set(form_data.history.to_owned()),
-            created_at: Default::default(),
-            updated_at: Set(Local::now().date_naive()),
-        }
-            .update(db)
+        dialog_id: i64,
+        dialog_name: &String,
+        history: &String
+    ) -> Result<UpdateResult, DbErr> {
+        Entity::update_many()
+            .col_expr(dialog_info::Column::DialogName, Expr::value(dialog_name))
+            .col_expr(dialog_info::Column::History, Expr::value(history))
+            .col_expr(dialog_info::Column::UpdatedAt, Expr::value(now()))
+            .filter(dialog_info::Column::DialogId.eq(dialog_id))
+            .exec(db)
             .await
     }
 
-    pub async fn delete_dialog_info(db: &DbConn, kb_id: i64) -> Result<DeleteResult, DbErr> {
-        let tag: dialog_info::ActiveModel = Entity::find_by_id(kb_id)
-            .one(db)
-            .await?
-            .ok_or(DbErr::Custom("Cannot find.".to_owned()))
-            .map(Into::into)?;
-
-        tag.delete(db).await
+    pub async fn delete_dialog_info(db: &DbConn, dialog_id: i64) -> Result<UpdateResult, DbErr> {
+        Entity::update_many()
+            .col_expr(dialog_info::Column::IsDeleted, Expr::value(true))
+            .filter(dialog_info::Column::DialogId.eq(dialog_id))
+            .exec(db)
+            .await
     }
 
     pub async fn delete_all_dialog_infos(db: &DbConn) -> Result<DeleteResult, DbErr> {
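`delete_dialog_info` now soft-deletes by flipping `is_deleted` through `update_many`, and reads such as `find_dialog_infos_by_uid` filter the flag back out. One easy-to-miss consequence is that every future read path must repeat that filter; a hedged sketch of a small helper that centralizes it (hypothetical name, same crate types):

```rust
use sea_orm::{ColumnTrait, DbConn, DbErr, EntityTrait, QueryFilter, Select};

use crate::entity::dialog_info;

/// Hypothetical helper: restrict any dialog_info select to live rows,
/// mirroring the IsDeleted filter added to find_dialog_infos_by_uid.
fn live(select: Select<dialog_info::Entity>) -> Select<dialog_info::Entity> {
    select.filter(dialog_info::Column::IsDeleted.eq(false))
}

/// Example read built on the helper.
pub async fn find_by_uid(db: &DbConn, uid: i64) -> Result<Vec<dialog_info::Model>, DbErr> {
    live(dialog_info::Entity::find())
        .filter(dialog_info::Column::Uid.eq(uid))
        .all(db)
        .await
}
```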
diff --git a/src/service/doc_info.rs b/src/service/doc_info.rs
index a97d79581e305f3fd28c0322333db23dce3750db..05478f3421b017e37dcdd3a9031353c7bdf56588 100644
--- a/src/service/doc_info.rs
+++ b/src/service/doc_info.rs
@@ -1,10 +1,15 @@
-use chrono::Local;
-use sea_orm::{ActiveModelTrait, ColumnTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryOrder, Unset, Unchanged, ConditionalStatement};
+use chrono::{Utc, FixedOffset};
+use sea_orm::{ActiveModelTrait, ColumnTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryOrder, Unset, Unchanged, ConditionalStatement, QuerySelect, JoinType, RelationTrait, DbBackend, Statement, UpdateResult};
 use sea_orm::ActiveValue::Set;
 use sea_orm::QueryFilter;
-use crate::api::doc_info::Params;
-use crate::entity::{doc2_doc, doc_info, kb_info, tag_info};
+use crate::api::doc_info::ListParams;
+use crate::entity::{doc2_doc, doc_info};
 use crate::entity::doc_info::Entity;
+use crate::service;
+
+fn now() -> chrono::DateTime<FixedOffset> {
+    Utc::now().with_timezone(&FixedOffset::east_opt(3600 * 8).unwrap())
+}
 
 pub struct Query;
 
@@ -24,42 +29,119 @@ impl Query {
             .await
     }
 
-    pub async fn find_doc_infos_by_name(db: &DbConn, uid: i64, name: String) -> Result<Vec<doc_info::Model>, DbErr> {
+    pub async fn find_doc_infos_by_name(db: &DbConn, uid: i64, name: &String, parent_id: Option<i64>) -> Result<Vec<doc_info::Model>, DbErr> {
+        let mut dids = Vec::<i64>::new();
+        if let Some(pid) = parent_id {
+            for d2d in doc2_doc::Entity::find().filter(doc2_doc::Column::ParentId.eq(pid)).all(db).await?{
+                dids.push(d2d.did);
+            }
+        }
+        else {
+            let doc = Entity::find()
+                .filter(doc_info::Column::DocName.eq(name.clone()))
+                .filter(doc_info::Column::Uid.eq(uid))
+                .all(db)
+                .await?;
+            if doc.is_empty() {
+                return Ok(vec![]);
+            }
+            let d2d = doc2_doc::Entity::find().filter(doc2_doc::Column::Did.eq(doc[0].did)).all(db).await?;
+            assert!(d2d.len() <= 1, "Did: {}->{}", doc[0].did, d2d.len());
+            if !d2d.is_empty() {
+                for d2d_ in doc2_doc::Entity::find().filter(doc2_doc::Column::ParentId.eq(d2d[0].parent_id)).all(db).await?{
+                    dids.push(d2d_.did);
+                }
+            }
+        }
+        
         Entity::find()
-            .filter(doc_info::Column::DocName.eq(name))
+            .filter(doc_info::Column::DocName.eq(name.clone()))
             .filter(doc_info::Column::Uid.eq(uid))
+            .filter(doc_info::Column::Did.is_in(dids))
+            .filter(doc_info::Column::IsDeleted.eq(false))
             .all(db)
             .await
     }
 
-    pub async fn find_doc_infos_by_params(db: &DbConn, params: Params) -> Result<Vec<doc_info::Model>, DbErr> {
+    pub async fn all_descendent_ids(db: &DbConn, doc_ids: &Vec<i64>) -> Result<Vec<i64>, DbErr> {
+        let mut dids = doc_ids.clone();
+        let mut i: usize = 0;
+        loop {
+            if dids.len() == i {
+                break;
+            }
+            
+            for d in doc2_doc::Entity::find().filter(doc2_doc::Column::ParentId.eq(dids[i])).all(db).await?{
+                dids.push(d.did);
+            }
+            i += 1;
+        }
+        Ok(dids)
+    }
+
+    pub async fn find_doc_infos_by_params(db: &DbConn, params: ListParams) -> Result<Vec<doc_info::Model>, DbErr> {
         // Setup paginator
-        let paginator = Entity::find();
+        let mut sql:String  = "
+        select 
+        a.did,
+        a.uid,
+        a.doc_name,
+        a.location,
+        a.size,
+        a.type,
+        a.created_at,
+        a.updated_at,
+        a.is_deleted
+        from 
+        doc_info as a
+        ".to_owned();
+
+        let mut cond: String = format!(" a.uid={} and a.is_deleted=false ", params.uid);
 
-        // Fetch paginated posts
-        let mut query = paginator
-            .find_with_related(kb_info::Entity);
         if let Some(kb_id) = params.filter.kb_id {
-            query = query.filter(kb_info::Column::KbId.eq(kb_id));
+            sql.push_str(&format!(" inner join kb2_doc on kb2_doc.did = a.did and kb2_doc.kb_id={}", kb_id));
         }
         if let Some(folder_id) = params.filter.folder_id {
-
+            sql.push_str(&format!(" inner join doc2_doc on a.did = doc2_doc.did and doc2_doc.parent_id={}", folder_id));
         }
+        // Fetch paginated posts
         if let Some(tag_id) = params.filter.tag_id {
-            query = query.filter(tag_info::Column::Tid.eq(tag_id));
+            let tag = service::tag_info::Query::find_tag_info_by_id(tag_id, db).await?.ok_or_else(|| DbErr::Custom(format!("Can't find tag: {}", tag_id)))?;
+            if tag.folder_id > 0 {
+                sql.push_str(&format!(" inner join doc2_doc on a.did = doc2_doc.did and doc2_doc.parent_id={}", tag.folder_id));
+            }
+            if !tag.regx.is_empty() {
+                cond.push_str(&format!(" and doc_name ~ '{}'", tag.regx.replace('\'', "''")));
+            }
         }
-        if let Some(keywords) = params.filter.keywords {
 
+        if let Some(keywords) = params.filter.keywords {
+            cond.push_str(&format!(" and doc_name like '%{}%'", keywords));
         }
-        Ok(query.order_by_asc(doc_info::Column::Did)
-            .all(db)
-            .await?
-            .into_iter()
-            .map(|(mut doc_info, kb_infos)| {
-                doc_info.kb_infos = kb_infos;
-                doc_info
-            })
-            .collect())
+        if !cond.is_empty() {
+            sql.push_str(" where ");
+            sql.push_str(&cond);
+        }
+        let mut orderby = params.sortby.clone();
+        if orderby.is_empty() {
+            orderby = "updated_at desc".to_owned();
+        }
+        sql.push_str(&format!(" order by {}", orderby));
+        let mut page_size: u32 = 30;
+        if let Some(pg_sz) = params.per_page {
+            page_size = pg_sz;
+        }
+        let mut page: u32 = 0;
+        if let Some(pg) = params.page {
+            page = pg;
+        }
+        sql.push_str(&format!(" limit {} offset {};", page_size, page * page_size));
+
+        Entity::find()
+            .from_raw_sql(Statement::from_sql_and_values(DbBackend::Postgres, sql, vec![]))
+            .all(db)
+            .await
+
     }
 
     pub async fn find_doc_infos_in_page(
@@ -126,11 +210,10 @@ impl Mutation {
             doc_name: Set(form_data.doc_name.to_owned()),
             size: Set(form_data.size.to_owned()),
             r#type: Set(form_data.r#type.to_owned()),
-            kb_progress: Set(form_data.kb_progress.to_owned()),
-            kb_progress_msg: Set(form_data.kb_progress_msg.to_owned()),
             location: Set(form_data.location.to_owned()),
-            created_at: Set(Local::now().date_naive()),
-            updated_at: Set(Local::now().date_naive()),
+            created_at: Set(form_data.created_at.to_owned()),
+            updated_at: Set(form_data.updated_at.to_owned()),
+            is_deleted:Default::default()
         }
             .save(db)
             .await
@@ -153,24 +236,50 @@ impl Mutation {
             doc_name: Set(form_data.doc_name.to_owned()),
             size: Set(form_data.size.to_owned()),
             r#type: Set(form_data.r#type.to_owned()),
-            kb_progress: Set(form_data.kb_progress.to_owned()),
-            kb_progress_msg: Set(form_data.kb_progress_msg.to_owned()),
             location: Set(form_data.location.to_owned()),
-            created_at: Default::default(),
-            updated_at: Set(Local::now().date_naive()),
+            created_at: doc_info.created_at,
+            updated_at: Set(now()),
+            is_deleted: Default::default(),
         }
             .update(db)
             .await
     }
 
-    pub async fn delete_doc_info(db: &DbConn, doc_id: i64) -> Result<DeleteResult, DbErr> {
-        let tag: doc_info::ActiveModel = Entity::find_by_id(doc_id)
+    pub async fn delete_doc_info(db: &DbConn, doc_ids: &Vec<i64>) -> Result<UpdateResult, DbErr> {
+        let mut dids = doc_ids.clone();
+        let mut i: usize = 0;
+        loop {
+            if dids.len() == i {
+                break;
+            }
+            let mut doc: doc_info::ActiveModel = Entity::find_by_id(dids[i])
             .one(db)
             .await?
-            .ok_or(DbErr::Custom("Cannot find.".to_owned()))
+            .ok_or(DbErr::Custom(format!("Can't find doc:{}", dids[i])))
             .map(Into::into)?;
+            doc.updated_at = Set(now());
+            doc.is_deleted = Set(true);
+            let _ = doc.update(db).await?;
+            
+            for d in doc2_doc::Entity::find().filter(doc2_doc::Column::ParentId.eq(dids[i])).all(db).await?{
+                dids.push(d.did);
+            }
+            let _ = doc2_doc::Entity::delete_many().filter(doc2_doc::Column::ParentId.eq(dids[i])).exec(db).await?;
+            let _ = doc2_doc::Entity::delete_many().filter(doc2_doc::Column::Did.eq(dids[i])).exec(db).await?;
+            i += 1;
+        }
+        crate::service::kb_info::Mutation::remove_docs(db, dids, None).await
+    }
 
-        tag.delete(db).await
+    pub async fn rename(db: &DbConn, doc_id: i64, name: &String) -> Result<doc_info::Model, DbErr> {
+        let mut doc: doc_info::ActiveModel = Entity::find_by_id(doc_id)
+            .one(db)
+            .await?
+            .ok_or(DbErr::Custom(format!("Can't find doc:{}", doc_id)))
+            .map(Into::into)?;
+        doc.updated_at = Set(now());
+        doc.doc_name = Set(name.clone());
+        doc.update(db).await
     }
 
     pub async fn delete_all_doc_infos(db: &DbConn) -> Result<DeleteResult, DbErr> {
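`find_doc_infos_by_params` builds its SQL with `format!`; the quote-doubling in the `~` and `like` conditions keeps `keywords` and `tag.regx` from breaking out of their string literals, but `params.sortby` still reaches the `order by` clause verbatim and deserves the same scrutiny. Since the query already goes through `Statement::from_sql_and_values` (currently with an empty value list), proper binding is available; a hedged sketch of the keyword lookup with `$n` placeholders (hypothetical function name):

```rust
use sea_orm::{DbBackend, DbConn, DbErr, EntityTrait, Statement};

use crate::entity::doc_info;

/// Bind user input as $n parameters instead of format!-ing it into the SQL,
/// so quotes and regex metacharacters can never change the statement.
pub async fn find_by_keyword(
    db: &DbConn,
    uid: i64,
    keywords: &str,
) -> Result<Vec<doc_info::Model>, DbErr> {
    doc_info::Entity::find()
        .from_raw_sql(Statement::from_sql_and_values(
            DbBackend::Postgres,
            "select * from doc_info as a \
             where a.uid = $1 and a.is_deleted = false and a.doc_name like $2",
            vec![uid.into(), format!("%{}%", keywords).into()],
        ))
        .all(db)
        .await
}
```

Placeholders cannot substitute identifiers, so the `order by` column would still need validation against a fixed whitelist of column names.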
diff --git a/src/service/kb_info.rs b/src/service/kb_info.rs
index c20edce41e3753b64fa5a966fee7da1e0a50dd06..090a792770004d28f5bcde51f5e677022ec178cf 100644
--- a/src/service/kb_info.rs
+++ b/src/service/kb_info.rs
@@ -1,10 +1,14 @@
-use chrono::Local;
-use sea_orm::{ActiveModelTrait, ColumnTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryFilter, QueryOrder};
+use chrono::{Local, FixedOffset, Utc};
+use migration::Expr;
+use sea_orm::{ActiveModelTrait, ColumnTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryFilter, QueryOrder, UpdateResult};
 use sea_orm::ActiveValue::Set;
 use crate::entity::kb_info;
 use crate::entity::kb2_doc;
 use crate::entity::kb_info::Entity;
 
+fn now() -> chrono::DateTime<FixedOffset> {
+    Utc::now().with_timezone(&FixedOffset::east_opt(3600 * 8).unwrap())
+}
 pub struct Query;
 
 impl Query {
@@ -30,6 +34,14 @@ impl Query {
             .await
     }
 
+    pub async fn find_kb_by_docs(db: &DbConn, doc_ids: Vec<i64>) -> Result<Vec<kb_info::Model>, DbErr> {
+        let mut kbids = Vec::<i64>::new();
+        for k in kb2_doc::Entity::find().filter(kb2_doc::Column::Did.is_in(doc_ids)).all(db).await?{
+            kbids.push(k.kb_id);
+        }
+        Entity::find().filter(kb_info::Column::KbId.is_in(kbids)).all(db).await
+    }
+
     pub async fn find_kb_infos_in_page(
         db: &DbConn,
         page: u64,
@@ -58,8 +70,9 @@ impl Mutation {
             uid: Set(form_data.uid.to_owned()),
             kb_name: Set(form_data.kb_name.to_owned()),
             icon: Set(form_data.icon.to_owned()),
-            created_at: Set(Local::now().date_naive()),
-            updated_at: Set(Local::now().date_naive()),
+            created_at: Set(now()),
+            updated_at: Set(now()),
+            is_deleted:Default::default()
         }
             .save(db)
             .await
@@ -71,10 +84,20 @@ impl Mutation {
         doc_ids: Vec<i64>
     )-> Result<(), DbErr>  {
         for did in doc_ids{
+            let res = kb2_doc::Entity::find()
+                .filter(kb2_doc::Column::KbId.eq(kb_id))
+                .filter(kb2_doc::Column::Did.eq(did))
+                .all(db)
+                .await?;
+            if !res.is_empty() { continue; }
             let _ = kb2_doc::ActiveModel {
                 id: Default::default(),
                 kb_id: Set(kb_id),
                 did: Set(did),
+                kb_progress: Set(0.0),
+                kb_progress_msg: Set("".to_owned()),
+                updated_at: Set(now()),
+                is_deleted:Default::default()
             }
                 .save(db)
                 .await?;
@@ -83,6 +106,25 @@ impl Mutation {
         Ok(())
     }
 
+    pub async fn remove_docs(
+        db: &DbConn,
+        doc_ids: Vec<i64>,
+        kb_id: Option<i64>
+    )-> Result<UpdateResult, DbErr>  {
+        let update = kb2_doc::Entity::update_many()
+            .col_expr(kb2_doc::Column::IsDeleted, Expr::value(true))
+            .col_expr(kb2_doc::Column::KbProgress, Expr::value(0))
+            .col_expr(kb2_doc::Column::KbProgressMsg, Expr::value(""))
+            .filter(kb2_doc::Column::Did.is_in(doc_ids));
+        if let Some(kbid) = kb_id {
+            update.filter(kb2_doc::Column::KbId.eq(kbid))
+                .exec(db)
+                .await
+        } else {
+            update.exec(db).await
+        }
+    }
+
     pub async fn update_kb_info_by_id(
         db: &DbConn,
         id: i64,
@@ -99,24 +141,25 @@ impl Mutation {
             uid: kb_info.uid,
             kb_name: Set(form_data.kb_name.to_owned()),
             icon: Set(form_data.icon.to_owned()),
-            created_at: Default::default(),
-            updated_at: Set(Local::now().date_naive()),
+            created_at: kb_info.created_at,
+            updated_at: Set(now()),
+            is_deleted: Default::default()
         }
             .update(db)
             .await
     }
 
     pub async fn delete_kb_info(db: &DbConn, kb_id: i64) -> Result<DeleteResult, DbErr> {
-        let tag: kb_info::ActiveModel = Entity::find_by_id(kb_id)
+        let kb: kb_info::ActiveModel = Entity::find_by_id(kb_id)
             .one(db)
             .await?
             .ok_or(DbErr::Custom("Cannot find.".to_owned()))
             .map(Into::into)?;
 
-        tag.delete(db).await
+        kb.delete(db).await
     }
 
     pub async fn delete_all_kb_infos(db: &DbConn) -> Result<DeleteResult, DbErr> {
         Entity::delete_many().exec(db).await
     }
-}
\ No newline at end of file
+}
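
`add_docs` now probes for an existing `(kb_id, did)` link before inserting. Fetching whole rows just to test existence works; a count probe is lighter, sketched below with `PaginatorTrait::count` (hypothetical helper name). Note that any check-then-insert still races under concurrency, so a unique index on `(kb_id, did)` remains the robust guarantee.

```rust
use sea_orm::{ColumnTrait, DbConn, DbErr, EntityTrait, PaginatorTrait, QueryFilter};

use crate::entity::kb2_doc;

/// Hypothetical existence probe: issues SELECT COUNT(*) instead of
/// materializing full kb2_doc rows just to check for a duplicate link.
pub async fn link_exists(db: &DbConn, kb_id: i64, did: i64) -> Result<bool, DbErr> {
    let n = kb2_doc::Entity::find()
        .filter(kb2_doc::Column::KbId.eq(kb_id))
        .filter(kb2_doc::Column::Did.eq(did))
        .count(db)
        .await?;
    Ok(n > 0)
}
```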
diff --git a/src/service/tag_info.rs b/src/service/tag_info.rs
index 42c9397b7af6a6eee306aa806c809e214cb94457..b39e8d4260fc6810246a6f0c4a6aeb02e4d70084 100644
--- a/src/service/tag_info.rs
+++ b/src/service/tag_info.rs
@@ -1,18 +1,24 @@
-use chrono::Local;
-use sea_orm::{ActiveModelTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryOrder};
+use chrono::{FixedOffset, Utc};
+use sea_orm::{ActiveModelTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryOrder, ColumnTrait, QueryFilter};
 use sea_orm::ActiveValue::Set;
 use crate::entity::tag_info;
 use crate::entity::tag_info::Entity;
 
+fn now() -> chrono::DateTime<FixedOffset> {
+    Utc::now().with_timezone(&FixedOffset::east_opt(3600 * 8).unwrap())
+}
 pub struct Query;
 
 impl Query {
-    pub async fn find_tag_info_by_id(db: &DbConn, id: i64) -> Result<Option<tag_info::Model>, DbErr> {
+    pub async fn find_tag_info_by_id(id: i64, db: &DbConn) -> Result<Option<tag_info::Model>, DbErr> {
         Entity::find_by_id(id).one(db).await
     }
 
-    pub async fn find_tag_infos(db: &DbConn) -> Result<Vec<tag_info::Model>, DbErr> {
-        Entity::find().all(db).await
+    pub async fn find_tags_by_uid(uid: i64, db: &DbConn) -> Result<Vec<tag_info::Model>, DbErr> {
+        Entity::find()
+            .filter(tag_info::Column::Uid.eq(uid))
+            .all(db)
+            .await
     }
 
     pub async fn find_tag_infos_in_page(
@@ -45,9 +51,9 @@ impl Mutation {
             regx: Set(form_data.regx.to_owned()),
             color: Set(form_data.color.to_owned()),
             icon: Set(form_data.icon.to_owned()),
-            dir: Set(form_data.dir.to_owned()),
-            created_at: Set(Local::now().date_naive()),
-            updated_at: Set(Local::now().date_naive()),
+            folder_id: Set(form_data.folder_id.to_owned()),
+            created_at: Set(now()),
+            updated_at: Set(now()),
         }
             .save(db)
             .await
@@ -71,9 +77,9 @@ impl Mutation {
             regx: Set(form_data.regx.to_owned()),
             color: Set(form_data.color.to_owned()),
             icon: Set(form_data.icon.to_owned()),
-            dir: Set(form_data.dir.to_owned()),
+            folder_id: Set(form_data.folder_id.to_owned()),
             created_at: Default::default(),
-            updated_at: Set(Local::now().date_naive()),
+            updated_at: Set(now()),
         }
             .update(db)
             .await
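`find_tag_info_by_id` returns a `Result<Option<Model>, DbErr>`, and the caller in `doc_info.rs` above now propagates both layers instead of unwrapping. A hedged sketch of that pattern as a reusable helper (hypothetical name; note the `(id, db)` argument order this module switched to):

```rust
use sea_orm::{DbConn, DbErr};

use crate::entity::tag_info;
use crate::service::tag_info::Query;

/// Hypothetical wrapper: propagate DbErr with `?` and turn a missing row
/// into DbErr::Custom instead of panicking on unwrap().
pub async fn require_tag(db: &DbConn, tag_id: i64) -> Result<tag_info::Model, DbErr> {
    Query::find_tag_info_by_id(tag_id, db)
        .await?
        .ok_or_else(|| DbErr::Custom(format!("Can't find tag: {}", tag_id)))
}
```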
diff --git a/src/service/user_info.rs b/src/service/user_info.rs
index bfb998f4d0902619f830681804398ab3c4bdefec..73636d15c08e4fa3f3e74bd270e04423e9e079cb 100644
--- a/src/service/user_info.rs
+++ b/src/service/user_info.rs
@@ -1,9 +1,13 @@
-use chrono::Local;
-use sea_orm::{ActiveModelTrait, ColumnTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryFilter, QueryOrder};
+use chrono::{FixedOffset, Utc};
+use migration::Expr;
+use sea_orm::{ActiveModelTrait, ColumnTrait, DbConn, DbErr, DeleteResult, EntityTrait, PaginatorTrait, QueryFilter, QueryOrder, UpdateResult};
 use sea_orm::ActiveValue::Set;
 use crate::entity::user_info;
 use crate::entity::user_info::Entity;
 
+fn now() -> chrono::DateTime<FixedOffset> {
+    Utc::now().with_timezone(&FixedOffset::east_opt(3600 * 8).unwrap())
+}
 pub struct Query;
 
 impl Query {
@@ -44,52 +48,64 @@ pub struct Mutation;
 impl Mutation {
     pub async fn create_user(
         db: &DbConn,
-        form_data: user_info::Model,
+        form_data: &user_info::Model,
     ) -> Result<user_info::ActiveModel, DbErr> {
         user_info::ActiveModel {
             uid: Default::default(),
             email: Set(form_data.email.to_owned()),
             nickname: Set(form_data.nickname.to_owned()),
-            avatar_url: Set(form_data.avatar_url.to_owned()),
-            color_schema: Set(form_data.color_schema.to_owned()),
+            avatar_base64: Set(form_data.avatar_base64.to_owned()),
+            color_scheme: Set(form_data.color_scheme.to_owned()),
             list_style: Set(form_data.list_style.to_owned()),
             language: Set(form_data.language.to_owned()),
             password: Set(form_data.password.to_owned()),
-            created_at: Set(Local::now().date_naive()),
-            updated_at: Set(Local::now().date_naive()),
+            last_login_at: Set(now()),
+            created_at: Set(now()),
+            updated_at: Set(now()),
         }
             .save(db)
             .await
     }
 
-    pub async fn update_tag_by_id(
+    pub async fn update_user_by_id(
         db: &DbConn,
-        id: i64,
-        form_data: user_info::Model,
+        form_data: &user_info::Model,
     ) -> Result<user_info::Model, DbErr> {
-        let user: user_info::ActiveModel = Entity::find_by_id(id)
+        let usr: user_info::ActiveModel = Entity::find_by_id(form_data.uid)
             .one(db)
             .await?
-            .ok_or(DbErr::Custom("Cannot find tag.".to_owned()))
+            .ok_or(DbErr::Custom("Cannot find user.".to_owned()))
             .map(Into::into)?;
 
         user_info::ActiveModel {
-            uid: user.uid,
+            uid: Set(form_data.uid),
             email: Set(form_data.email.to_owned()),
             nickname: Set(form_data.nickname.to_owned()),
-            avatar_url: Set(form_data.avatar_url.to_owned()),
-            color_schema: Set(form_data.color_schema.to_owned()),
+            avatar_base64: Set(form_data.avatar_base64.to_owned()),
+            color_scheme: Set(form_data.color_scheme.to_owned()),
             list_style: Set(form_data.list_style.to_owned()),
             language: Set(form_data.language.to_owned()),
             password: Set(form_data.password.to_owned()),
-            created_at: Default::default(),
-            updated_at: Set(Local::now().date_naive()),
+            updated_at: Set(now()),
+            last_login_at: usr.last_login_at,
+            created_at: usr.created_at,
         }
             .update(db)
             .await
     }
 
-    pub async fn delete_tag(db: &DbConn, tid: i64) -> Result<DeleteResult, DbErr> {
+    pub async fn update_login_status(
+        uid: i64,
+        db: &DbConn
+    ) -> Result<UpdateResult, DbErr> {
+        Entity::update_many()
+            .col_expr(user_info::Column::LastLoginAt, Expr::value(now()))
+            .filter(user_info::Column::Uid.eq(uid))
+            .exec(db)
+            .await
+    }
+
+    pub async fn delete_user(db: &DbConn, tid: i64) -> Result<DeleteResult, DbErr> {
         let tag: user_info::ActiveModel = Entity::find_by_id(tid)
             .one(db)
             .await?
@@ -99,7 +115,7 @@ impl Mutation {
         tag.delete(db).await
     }
 
-    pub async fn delete_all_tags(db: &DbConn) -> Result<DeleteResult, DbErr> {
+    pub async fn delete_all(db: &DbConn) -> Result<DeleteResult, DbErr> {
         Entity::delete_many().exec(db).await
     }
 }
\ No newline at end of file
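
`update_login_status` stamps `last_login_at` through `update_many` without round-tripping the whole row. A hedged sketch of where it slots into a login flow (hypothetical function; the real handler lives in `api::user_info::login`, and the plain-text password comparison mirrors what this codebase currently stores, real deployments should hash):

```rust
use sea_orm::{ColumnTrait, DbConn, DbErr, EntityTrait, QueryFilter};

use crate::entity::user_info;
use crate::service::user_info::Mutation;

/// Hypothetical login core: look up by email, check credentials,
/// then stamp last_login_at via the new update_login_status.
pub async fn login(
    db: &DbConn,
    email: &str,
    password: &str,
) -> Result<Option<user_info::Model>, DbErr> {
    let user = user_info::Entity::find()
        .filter(user_info::Column::Email.eq(email))
        .one(db)
        .await?;
    match user {
        // Plain-text comparison only because the service stores passwords as-is.
        Some(u) if u.password == password => {
            Mutation::update_login_status(u.uid, db).await?;
            Ok(Some(u))
        }
        _ => Ok(None),
    }
}
```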