diff --git a/README.md b/README.md
index c4237da..efaf395 100644
--- a/README.md
+++ b/README.md
@@ -2,8 +2,6 @@
 
 doccano-mini is a few-shot annotation tool to assist the development of applications with Large language models (LLMs). Once you annotate a few text, you can test your task (e.g. text classification) with LLMs, then download the [LangChain](https://github.com/hwchase17/langchain)'s config.
 
-![Demo](./docs/images/demo.jpg)
-
 Note: This is an experimental project.
 
 ## Installation
@@ -28,6 +26,24 @@ doccano-mini
 
 Now, we can open the browser and go to `http://localhost:8501/` to see the interface.
 
+### Step 1: Annotate a few texts
+
+In this step, we will annotate a few texts. We can add a new text by clicking the `+` button. Try it out by double-clicking on any cell. You'll notice you can edit all cell values.
+
+![Step1](./docs/images/annotation.gif)
+
+### Step 2: Test your task
+
+In this step, we will test your task. We can enter a new text into the text box and click the `Predict` button. Then, we can see the prediction result.
+
+![Step2](./docs/images/test_new_example.jpg)
+
+### Step 3: Download the config
+
+In this step, we will download the [LangChain](https://github.com/hwchase17/langchain) config. We can click the `Download` button to download it.
+
+![Step3](./docs/images/download_config.jpg)
+
 ## Development
 
 ```bash
diff --git a/doccano_mini/app.py b/doccano_mini/app.py
index 2a60a88..3a89d1b 100644
--- a/doccano_mini/app.py
+++ b/doccano_mini/app.py
@@ -42,9 +42,8 @@ def main():
     prompt.prefix = instruction
 
     st.header("Test")
-    text = st.text_area(label="Please enter your text.", value="")
-
-    st.header("Model Options")
+    col1, col2 = st.columns([3, 1])
+    text = col1.text_area(label="Please enter your text.", value="", height=300)
 
     # https://platform.openai.com/docs/models/gpt-3-5
     available_models = (
@@ -54,13 +53,10 @@
         "text-davinci-003",
         "text-davinci-002",
         "code-davinci-002",
     )
-    # Use text-davinci-003 by default.
-    model_name = st.selectbox("Select an OpenAI model to use.", available_models, index=2)
-
-    temperature = st.slider("Temperature", min_value=0.0, max_value=1.0, value=0.7, step=0.01)
-
-    top_p = st.slider("Top-p", min_value=0.0, max_value=1.0, value=1.0, step=0.01)
+    model_name = col2.selectbox("Model", available_models, index=2)
+    temperature = col2.slider("Temperature", min_value=0.0, max_value=1.0, value=0.7, step=0.01)
+    top_p = col2.slider("Top-p", min_value=0.0, max_value=1.0, value=1.0, step=0.01)
 
     if st.button("Predict"):
         llm = OpenAI(model_name=model_name, temperature=temperature, top_p=top_p)
diff --git a/docs/images/annotation.gif b/docs/images/annotation.gif
new file mode 100644
index 0000000..de3244f
Binary files /dev/null and b/docs/images/annotation.gif differ
diff --git a/docs/images/demo.jpg b/docs/images/demo.jpg
deleted file mode 100644
index db99a5c..0000000
Binary files a/docs/images/demo.jpg and /dev/null differ
diff --git a/docs/images/download_config.jpg b/docs/images/download_config.jpg
new file mode 100644
index 0000000..aa297e3
Binary files /dev/null and b/docs/images/download_config.jpg differ
diff --git a/docs/images/test_new_example.jpg b/docs/images/test_new_example.jpg
new file mode 100644
index 0000000..28f602d
Binary files /dev/null and b/docs/images/test_new_example.jpg differ
diff --git a/pyproject.toml b/pyproject.toml
index 2e62988..1024d67 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "doccano-mini"
-version = "0.0.2"
+version = "0.0.3"
 description = "Generate LangChain config quickly"
 authors = ["Hironsan "]
 license = "MIT"
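Note on Step 3 of the README walkthrough added above: the downloaded file is a LangChain prompt config, so it should be loadable with LangChain's `load_prompt`. The sketch below is illustrative only — the filename `config.yaml`, the chosen model, and the assumption that the exported prompt has a single input variable are not pinned down by this patch, and an `OPENAI_API_KEY` must be set in the environment.

```python
# Minimal sketch of using the config downloaded in Step 3 with LangChain.
# Assumptions: the file was saved as "config.yaml", it is a prompt template
# that `load_prompt` understands, and it has exactly one input variable.
from langchain.chains import LLMChain
from langchain.llms import OpenAI
from langchain.prompts import load_prompt

prompt = load_prompt("config.yaml")      # rebuild the few-shot prompt
llm = OpenAI(model_name="text-davinci-003", temperature=0.7)
chain = LLMChain(llm=llm, prompt=prompt)

# Run the annotated task (e.g. text classification) on a new example.
print(chain.run("This movie is awesome."))
```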
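Note on the `doccano_mini/app.py` hunks: the separate "Model Options" header goes away and the test section becomes a two-column layout, with the text box on the left and the model controls stacked on the right. The condensed sketch below only illustrates that layout; it is not the full `main()` (prompt building and the annotation table are omitted), the import paths are assumed from standard Streamlit/LangChain usage, and the model tuple is shortened to the entries visible in the diff.

```python
# Condensed sketch of the revised layout: a wide column for the input text,
# a narrow column for the model options. Not the complete doccano-mini app.
import streamlit as st
from langchain.llms import OpenAI  # assumed import path

col1, col2 = st.columns([3, 1])  # 3:1 width ratio
text = col1.text_area(label="Please enter your text.", value="", height=300)

# Only the models visible in the diff; the app's full list and default may differ.
available_models = ("text-davinci-003", "text-davinci-002", "code-davinci-002")
model_name = col2.selectbox("Model", available_models, index=0)
temperature = col2.slider("Temperature", min_value=0.0, max_value=1.0, value=0.7, step=0.01)
top_p = col2.slider("Top-p", min_value=0.0, max_value=1.0, value=1.0, step=0.01)

if st.button("Predict"):
    llm = OpenAI(model_name=model_name, temperature=temperature, top_p=top_p)
    # ...the real app then formats the few-shot prompt with `text` and shows the completion.
```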