Skip to content

Commit

Permalink
Merge pull request #4 from cloud-py-api/appapi-1.1.0
Browse files Browse the repository at this point in the history
"/init" endpoint implementation
  • Loading branch information
bigcat88 authored Oct 23, 2023
2 parents 8b84a85 + cf7cdef commit 594c9b4
Show file tree
Hide file tree
Showing 5 changed files with 47 additions and 34 deletions.
5 changes: 2 additions & 3 deletions .run/TalkBotAI.run.xml
Original file line number Diff line number Diff line change
Expand Up @@ -10,13 +10,12 @@
<env name="APP_PORT" value="10034" />
<env name="APP_SECRET" value="12345" />
<env name="APP_VERSION" value="1.0.0" />
<env name="NEXTCLOUD_URL" value="http://nextcloud.local" />
<env name="NEXTCLOUD_URL" value="http://stable27.local" />
<env name="PYTHONUNBUFFERED" value="1" />
</envs>
<option name="SDK_HOME" value="" />
<option name="SDK_NAME" value="Python 3.11 (talk_bot_ai_example)" />
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
<option name="IS_MODULE_SDK" value="false" />
<option name="IS_MODULE_SDK" value="true" />
<option name="ADD_CONTENT_ROOTS" value="true" />
<option name="ADD_SOURCE_ROOTS" value="true" />
<EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
Expand Down
20 changes: 10 additions & 10 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -16,15 +16,15 @@ help:
@echo " run27 install TalkBotAI for Nextcloud 27"
@echo " "
@echo " For development of this example use PyCharm run configurations. Development is always set for last Nextcloud."
@echo " First run 'TalkBotAI' and then 'make manual_register', after that you can use/debug/develop it and easy test."
@echo " First run 'TalkBotAI' and then 'make register', after that you can use/debug/develop it and easy test."
@echo " "
@echo " manual_register28 perform registration of running 'TalkBotAI' into the 'manual_install' deploy daemon."
@echo " manual_register27 perform registration of running 'TalkBotAI' into the 'manual_install' deploy daemon."
@echo "  register28           perform registration of running 'TalkBotAI' into the 'manual_install' deploy daemon of Nextcloud 28."
@echo "  register27           perform registration of running 'TalkBotAI' into the 'manual_install' deploy daemon of Nextcloud 27."

.PHONY: build-push
build-push:
docker login ghcr.io
docker buildx build --push --platform linux/arm64/v8,linux/amd64 --tag ghcr.io/cloud-py-api/talk_bot_ai_example:1.0.3 --tag ghcr.io/cloud-py-api/talk_bot_ai_example:latest .
docker buildx build --push --platform linux/arm64/v8,linux/amd64 --tag ghcr.io/cloud-py-api/talk_bot_ai_example:1.1.0 --tag ghcr.io/cloud-py-api/talk_bot_ai_example:latest .

.PHONY: deploy28
deploy28:
Expand All @@ -50,16 +50,16 @@ run27:
docker exec master-stable27-1 sudo -u www-data php occ app_api:app:register talk_bot_ai_example docker_dev -e --force-scopes \
--info-xml https://raw.githubusercontent.com/cloud-py-api/talk_bot_ai_example/main/appinfo/info.xml

.PHONY: manual_register28
manual_register28:
.PHONY: register28
register28:
docker exec master-nextcloud-1 sudo -u www-data php occ app_api:app:unregister talk_bot_ai_example --silent || true
docker exec master-nextcloud-1 sudo -u www-data php occ app_api:app:register talk_bot_ai_example manual_install --json-info \
"{\"appid\":\"talk_bot_ai_example\",\"name\":\"TalkBotAI Example\",\"daemon_config_name\":\"manual_install\",\"version\":\"1.0.0\",\"secret\":\"12345\",\"host\":\"host.docker.internal\",\"port\":10034,\"scopes\":{\"required\":[\"TALK\", \"TALK_BOT\"],\"optional\":[]},\"protocol\":\"http\",\"system_app\":0}" \
-e --force-scopes
--force-scopes

.PHONY: manual_register27
manual_register27:
.PHONY: register27
register27:
docker exec master-stable27-1 sudo -u www-data php occ app_api:app:unregister talk_bot_ai_example --silent || true
docker exec master-stable27-1 sudo -u www-data php occ app_api:app:register talk_bot_ai_example manual_install --json-info \
"{\"appid\":\"talk_bot_ai_example\",\"name\":\"TalkBotAI Example\",\"daemon_config_name\":\"manual_install\",\"version\":\"1.0.0\",\"secret\":\"12345\",\"host\":\"host.docker.internal\",\"port\":10034,\"scopes\":{\"required\":[\"TALK\", \"TALK_BOT\"],\"optional\":[]},\"protocol\":\"http\",\"system_app\":0}" \
-e --force-scopes
--force-scopes
4 changes: 2 additions & 2 deletions appinfo/info.xml
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ It shouldn't be too difficult to use a more advanced model based on this example
Refer to [How to install](https://github.com/cloud-py-api/talk_bot_ai_example/blob/main/HOW_TO_INSTALL.md) to try it.
]]></description>
<version>1.0.3</version>
<version>1.1.0</version>
<licence>MIT</licence>
<author mail="andrey18106x@gmail.com" homepage="https://github.com/andrey18106">Andrey Borysenko</author>
<author mail="bigcat88@icloud.com" homepage="https://github.com/bigcat88">Alexander Piskun</author>
Expand All @@ -32,7 +32,7 @@ Refer to [How to install](https://github.com/cloud-py-api/talk_bot_ai_example/bl
<docker-install>
<registry>ghcr.io</registry>
<image>cloud-py-api/talk_bot_ai_example</image>
<image-tag>1.0.3</image-tag>
<image-tag>1.1.0</image-tag>
</docker-install>
<scopes>
<required>
Expand Down
1 change: 1 addition & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -5,3 +5,4 @@ httpx>=0.24.1
fastapi>=0.101
uvicorn[standard]>=0.23.2
transformers[torch]>=4.33
tqdm
51 changes: 32 additions & 19 deletions src/main.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,9 @@
"""Example of an application that uses Python Transformers library with Talk Bot APIs."""

import os

# This line should be on top before any import of the "Transformers" library.
os.environ["TRANSFORMERS_CACHE"] = os.environ["APP_PERSISTENT_STORAGE"] # noqa

import asyncio
import dataclasses
import re
from threading import Thread
from typing import Annotated
from base64 import b64encode, b64decode
from random import choice
Expand All @@ -20,13 +15,14 @@
import httpx
import json
import requests
import tqdm
from fastapi import BackgroundTasks, Depends, FastAPI, responses, Request, HTTPException, status
from transformers import pipeline
import uvicorn
from huggingface_hub import snapshot_download

APP = FastAPI()
MODEL_NAME = "MBZUAI/LaMini-Flan-T5-248M"
MODEL_INIT_THREAD = None
BOT_URL = "/ai_talk_bot_example"


Expand Down Expand Up @@ -199,7 +195,10 @@ def ai_talk_bot_process_request(message: TalkBotMessage):
r = re.search(r"@assistant\s(.*)", message.object_content["message"], re.IGNORECASE)
if r is None:
return
model = pipeline("text2text-generation", model=MODEL_NAME)
model = pipeline(
"text2text-generation",
model=snapshot_download(MODEL_NAME, local_files_only=True, cache_dir=os.environ["APP_PERSISTENT_STORAGE"]),
)
response_text = model(r.group(1), max_length=64, do_sample=True)[0]["generated_text"]
send_message(response_text, message)

Expand Down Expand Up @@ -241,21 +240,35 @@ def enabled_handler(enabled: bool, request: Request):
return responses.JSONResponse(content={"error": r}, status_code=200)


def download_models():
    # Instantiating the pipeline downloads and caches the model weights as a
    # side effect; the pipeline object itself is discarded.
    pipeline("text2text-generation", model=MODEL_NAME)


# NOTE(review): this span is commit-diff residue — the removed threaded
# implementation and the added simplified handler appear concatenated,
# so the second `return` below is unreachable. Confirm against the
# actual post-merge file before relying on this text.
@APP.get("/heartbeat")
def heartbeat_handler():
    """Liveness probe polled by AppAPI; reports model-initialization state."""
    global MODEL_INIT_THREAD
    print("heartbeat_handler: called")
    # Lazily start a one-shot background thread that downloads the model.
    if MODEL_INIT_THREAD is None:
        MODEL_INIT_THREAD = Thread(target=download_models)
        MODEL_INIT_THREAD.start()
        print("heartbeat_handler: started initialization thread")
    # "init" while the download thread is still running, "ok" afterwards.
    r = "init" if MODEL_INIT_THREAD.is_alive() else "ok"
    print(f"heartbeat_handler: result={r}")
    return responses.JSONResponse(content={"status": r}, status_code=200)
    return responses.JSONResponse(content={"status": "ok"}, status_code=200)  # unreachable (diff artifact)


def update_progress_status(progress: int):
    """Report model-download progress (0-100) to AppAPI via an OCS call."""
    status_path = f"/ocs/v1.php/apps/app_api/apps/status/{os.environ['APP_ID']}"
    ocs_call(method="PUT", path=status_path, json_data={"progress": progress})


def fetch_models_task():
    """Download the model snapshot into persistent storage, streaming
    percentage progress to the Nextcloud AppAPI status endpoint."""

    class TqdmProgress(tqdm.tqdm):
        # Progress hook: piggyback on tqdm's display refresh to forward
        # the completed percentage to Nextcloud.
        def display(self, msg=None, pos=None):
            # Guard: tqdm's `total` may be None (unknown size) or 0, which
            # would raise TypeError/ZeroDivisionError in the computation.
            if self.total:
                update_progress_status(min(int(self.n * 100 / self.total), 100))
            return super().display(msg, pos)

    snapshot_download(MODEL_NAME, cache_dir=os.environ["APP_PERSISTENT_STORAGE"], tqdm_class=TqdmProgress)  # noqa
    # Ensure the UI reaches 100% even if the last display tick was missed.
    update_progress_status(100)


@APP.post("/init")
def init_handler(background_tasks: BackgroundTasks):
    """AppAPI "/init" endpoint: schedule the model download to run in the
    background and acknowledge immediately with an empty JSON body."""
    background_tasks.add_task(fetch_models_task)
    return responses.JSONResponse(content={}, status_code=200)


if __name__ == "__main__":
Expand Down

0 comments on commit 594c9b4

Please sign in to comment.