openchatai / opencopilot Goto Github PK
View Code? Open in Web Editor NEW🤖 🔥 Language-to-actions engine
Home Page: https://opencopilot.so
License: MIT License
🤖 🔥 Language-to-actions engine
Home Page: https://opencopilot.so
License: MIT License
Error viewing app in browser:
Make log is as follows:
opencopilot-dashboard-1 | AxiosError: connect ECONNREFUSED 127.0.0.1:8888
opencopilot-dashboard-1 | at AxiosError.from (/app/.next/server/chunks/1522.js:11:11992)
opencopilot-dashboard-1 | at RedirectableRequest.<anonymous> (/app/.next/server/chunks/1522.js:13:8687)
opencopilot-dashboard-1 | at RedirectableRequest.emit (node:events:517:28)
opencopilot-dashboard-1 | at u.<computed> (/app/.next/server/chunks/1522.js:1:14914)
opencopilot-dashboard-1 | at ClientRequest.emit (node:events:517:28)
opencopilot-dashboard-1 | at Socket.socketErrorListener (node:_http_client:501:9)
opencopilot-dashboard-1 | at Socket.emit (node:events:517:28)
opencopilot-dashboard-1 | at emitErrorNT (node:internal/streams/destroy:151:8)
opencopilot-dashboard-1 | at emitErrorCloseNT (node:internal/streams/destroy:116:3)
opencopilot-dashboard-1 | at process.processTicksAndRejections (node:internal/process/task_queues:82:21) {
opencopilot-dashboard-1 | port: 8888,
opencopilot-dashboard-1 | address: '127.0.0.1',
opencopilot-dashboard-1 | syscall: 'connect',
opencopilot-dashboard-1 | code: 'ECONNREFUSED',
opencopilot-dashboard-1 | errno: -111,
opencopilot-dashboard-1 | config: {
opencopilot-dashboard-1 | transitional: {
opencopilot-dashboard-1 | silentJSONParsing: true,
opencopilot-dashboard-1 | forcedJSONParsing: true,
opencopilot-dashboard-1 | clarifyTimeoutError: false
opencopilot-dashboard-1 | },
opencopilot-dashboard-1 | adapter: [ 'xhr', 'http' ],
opencopilot-dashboard-1 | transformRequest: [ [Function (anonymous)] ],
opencopilot-dashboard-1 | transformResponse: [ [Function (anonymous)] ],
opencopilot-dashboard-1 | timeout: 0,
opencopilot-dashboard-1 | xsrfCookieName: 'XSRF-TOKEN',
opencopilot-dashboard-1 | xsrfHeaderName: 'X-XSRF-TOKEN',
opencopilot-dashboard-1 | maxContentLength: -1,
opencopilot-dashboard-1 | maxBodyLength: -1,
opencopilot-dashboard-1 | env: { FormData: [Function], Blob: [class Blob] },
opencopilot-dashboard-1 | validateStatus: [Function: validateStatus],
opencopilot-dashboard-1 | headers: Object [AxiosHeaders] {
opencopilot-dashboard-1 | Accept: 'application/json, text/plain, */*',
opencopilot-dashboard-1 | 'Content-Type': undefined,
opencopilot-dashboard-1 | 'User-Agent': 'axios/1.6.0',
opencopilot-dashboard-1 | 'Accept-Encoding': 'gzip, compress, deflate, br'
opencopilot-dashboard-1 | },
opencopilot-dashboard-1 | baseURL: 'http://localhost:8888/backend/copilot',
opencopilot-dashboard-1 | method: 'get',
opencopilot-dashboard-1 | url: '/',
opencopilot-dashboard-1 | data: undefined
opencopilot-dashboard-1 | },
opencopilot-dashboard-1 | request: <ref *1> Writable {
opencopilot-dashboard-1 | _writableState: WritableState {
opencopilot-dashboard-1 | objectMode: false,
opencopilot-dashboard-1 | highWaterMark: 16384,
opencopilot-dashboard-1 | finalCalled: false,
opencopilot-dashboard-1 | needDrain: false,
opencopilot-dashboard-1 | ending: false,
opencopilot-dashboard-1 | ended: false,
opencopilot-dashboard-1 | finished: false,
opencopilot-dashboard-1 | destroyed: false,
opencopilot-dashboard-1 | decodeStrings: true,
opencopilot-dashboard-1 | defaultEncoding: 'utf8',
opencopilot-dashboard-1 | length: 0,
opencopilot-dashboard-1 | writing: false,
opencopilot-dashboard-1 | corked: 0,
opencopilot-dashboard-1 | sync: true,
opencopilot-dashboard-1 | bufferProcessing: false,
opencopilot-dashboard-1 | onwrite: [Function: bound onwrite],
opencopilot-dashboard-1 | writecb: null,
opencopilot-dashboard-1 | writelen: 0,
opencopilot-dashboard-1 | afterWriteTickInfo: null,
opencopilot-dashboard-1 | buffered: [],
opencopilot-dashboard-1 | bufferedIndex: 0,
opencopilot-dashboard-1 | allBuffers: true,
opencopilot-dashboard-1 | allNoop: true,
opencopilot-dashboard-1 | pendingcb: 0,
opencopilot-dashboard-1 | constructed: true,
opencopilot-dashboard-1 | prefinished: false,
opencopilot-dashboard-1 | errorEmitted: false,
opencopilot-dashboard-1 | emitClose: true,
opencopilot-dashboard-1 | autoDestroy: true,
opencopilot-dashboard-1 | errored: null,
opencopilot-dashboard-1 | closed: false,
opencopilot-dashboard-1 | closeEmitted: false,
opencopilot-dashboard-1 | [Symbol(kOnFinished)]: []
opencopilot-dashboard-1 | },
opencopilot-dashboard-1 | _events: [Object: null prototype] {
opencopilot-dashboard-1 | response: [Function (anonymous)],
opencopilot-dashboard-1 | error: [Function (anonymous)],
opencopilot-dashboard-1 | socket: [Function (anonymous)]
opencopilot-dashboard-1 | },
opencopilot-dashboard-1 | _eventsCount: 3,
opencopilot-dashboard-1 | _maxListeners: undefined,
opencopilot-dashboard-1 | _options: {
opencopilot-dashboard-1 | maxRedirects: 21,
opencopilot-dashboard-1 | maxBodyLength: Infinity,
opencopilot-dashboard-1 | protocol: 'http:',
opencopilot-dashboard-1 | path: '/backend/copilot/',
opencopilot-dashboard-1 | method: 'GET',
opencopilot-dashboard-1 | headers: [Object: null prototype],
opencopilot-dashboard-1 | agents: [Object],
opencopilot-dashboard-1 | auth: undefined,
opencopilot-dashboard-1 | family: undefined,
opencopilot-dashboard-1 | beforeRedirect: [Function: dispatchBeforeRedirect],
opencopilot-dashboard-1 | beforeRedirects: [Object],
opencopilot-dashboard-1 | hostname: 'localhost',
opencopilot-dashboard-1 | port: '8888',
opencopilot-dashboard-1 | agent: undefined,
opencopilot-dashboard-1 | nativeProtocols: [Object],
opencopilot-dashboard-1 | pathname: '/backend/copilot/'
opencopilot-dashboard-1 | },
opencopilot-dashboard-1 | _ended: true,
opencopilot-dashboard-1 | _ending: true,
opencopilot-dashboard-1 | _redirectCount: 0,
opencopilot-dashboard-1 | _redirects: [],
opencopilot-dashboard-1 | _requestBodyLength: 0,
opencopilot-dashboard-1 | _requestBodyBuffers: [],
opencopilot-dashboard-1 | _onNativeResponse: [Function (anonymous)],
opencopilot-dashboard-1 | _currentRequest: ClientRequest {
opencopilot-dashboard-1 | _events: [Object: null prototype],
opencopilot-dashboard-1 | _eventsCount: 7,
opencopilot-dashboard-1 | _maxListeners: undefined,
opencopilot-dashboard-1 | outputData: [],
opencopilot-dashboard-1 | outputSize: 0,
opencopilot-dashboard-1 | writable: true,
opencopilot-dashboard-1 | destroyed: true,
opencopilot-dashboard-1 | _last: true,
opencopilot-dashboard-1 | chunkedEncoding: false,
opencopilot-dashboard-1 | shouldKeepAlive: false,
opencopilot-dashboard-1 | maxRequestsOnConnectionReached: false,
opencopilot-dashboard-1 | _defaultKeepAlive: true,
opencopilot-dashboard-1 | useChunkedEncodingByDefault: false,
opencopilot-dashboard-1 | sendDate: false,
opencopilot-dashboard-1 | _removedConnection: false,
opencopilot-dashboard-1 | _removedContLen: false,
opencopilot-dashboard-1 | _removedTE: false,
opencopilot-dashboard-1 | strictContentLength: false,
opencopilot-dashboard-1 | _contentLength: 0,
opencopilot-dashboard-1 | _hasBody: true,
opencopilot-dashboard-1 | _trailer: '',
opencopilot-dashboard-1 | finished: true,
opencopilot-dashboard-1 | _headerSent: true,
opencopilot-dashboard-1 | _closed: true,
opencopilot-dashboard-1 | socket: [Socket],
opencopilot-dashboard-1 | _header: 'GET /backend/copilot/ HTTP/1.1\r\n' +
opencopilot-dashboard-1 | 'Accept: application/json, text/plain, */*\r\n' +
opencopilot-dashboard-1 | 'User-Agent: axios/1.6.0\r\n' +
opencopilot-dashboard-1 | 'Accept-Encoding: gzip, compress, deflate, br\r\n' +
opencopilot-dashboard-1 | 'Host: localhost:8888\r\n' +
opencopilot-dashboard-1 | 'Connection: close\r\n' +
opencopilot-dashboard-1 | '\r\n',
opencopilot-dashboard-1 | _keepAliveTimeout: 0,
opencopilot-dashboard-1 | _onPendingData: [Function: nop],
opencopilot-dashboard-1 | agent: [Agent],
opencopilot-dashboard-1 | socketPath: undefined,
opencopilot-dashboard-1 | method: 'GET',
opencopilot-dashboard-1 | maxHeaderSize: undefined,
opencopilot-dashboard-1 | insecureHTTPParser: undefined,
opencopilot-dashboard-1 | joinDuplicateHeaders: undefined,
opencopilot-dashboard-1 | path: '/backend/copilot/',
opencopilot-dashboard-1 | _ended: false,
opencopilot-dashboard-1 | res: null,
opencopilot-dashboard-1 | aborted: false,
opencopilot-dashboard-1 | timeoutCb: null,
opencopilot-dashboard-1 | upgradeOrConnect: false,
opencopilot-dashboard-1 | parser: null,
opencopilot-dashboard-1 | maxHeadersCount: null,
opencopilot-dashboard-1 | reusedSocket: false,
opencopilot-dashboard-1 | host: 'localhost',
opencopilot-dashboard-1 | protocol: 'http:',
opencopilot-dashboard-1 | _redirectable: [Circular *1],
opencopilot-dashboard-1 | [Symbol(kCapture)]: false,
opencopilot-dashboard-1 | [Symbol(kBytesWritten)]: 0,
opencopilot-dashboard-1 | [Symbol(kNeedDrain)]: false,
opencopilot-dashboard-1 | [Symbol(corked)]: 0,
opencopilot-dashboard-1 | [Symbol(kOutHeaders)]: [Object: null prototype],
opencopilot-dashboard-1 | [Symbol(errored)]: null,
opencopilot-dashboard-1 | [Symbol(kHighWaterMark)]: 16384,
opencopilot-dashboard-1 | [Symbol(kRejectNonStandardBodyWrites)]: false,
opencopilot-dashboard-1 | [Symbol(kUniqueHeaders)]: null
opencopilot-dashboard-1 | },
opencopilot-dashboard-1 | _currentUrl: 'http://localhost:8888/backend/copilot/',
opencopilot-dashboard-1 | [Symbol(kCapture)]: false
opencopilot-dashboard-1 | },
opencopilot-dashboard-1 | cause: Error: connect ECONNREFUSED 127.0.0.1:8888
opencopilot-dashboard-1 | at TCPConnectWrap.afterConnect [as oncomplete] (node:net:1555:16)
opencopilot-dashboard-1 | at TCPConnectWrap.callbackTrampoline (node:internal/async_hooks:130:17) {
opencopilot-dashboard-1 | errno: -111,
opencopilot-dashboard-1 | code: 'ECONNREFUSED',
opencopilot-dashboard-1 | syscall: 'connect',
opencopilot-dashboard-1 | address: '127.0.0.1',
opencopilot-dashboard-1 | port: 8888
opencopilot-dashboard-1 | }
opencopilot-dashboard-1 | }
opencopilot-dashboard-1 | [Error: An error occurred in the Server Components render. The specific message is omitted in production builds to avoid leaking sensitive details. A digest property is included on this error instance which may provide additional details about the nature of the error.] {
opencopilot-dashboard-1 | digest: '2385381294'
opencopilot-dashboard-1 | }
opencopilot-nginx-1 | 192.168.65.1 - - [29/Nov/2023:06:51:01 +0000] "GET / HTTP/1.1" 200 11761 "-" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36"
opencopilot-nginx-1 | 2023/11/29 06:51:01 [error] 29#29: *50 connect() failed (111: Connection refused) while connecting to upstream, client: 192.168.65.1, server: , request: "GET /backend/copilot/ HTTP/1.1", upstream: "http://172.21.0.10:8002/backend/copilot/", host: "localhost:8888", referrer: "http://127.0.0.1:8888/"
opencopilot-nginx-1 | 192.168.65.1 - - [29/Nov/2023:06:51:01 +0000] "GET /backend/copilot/ HTTP/1.1" 502 559 "http://127.0.0.1:8888/" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36"
/app/app.py:68: UserWarning: name 'text' is not defined
Line 66 in ee47f1d
Please add instructions to the README on how to deploy this code to Azure from Windows,
and how to run it locally on Windows.
Is your feature request related to a problem? Please describe.
I would like OpenCopilot to help me with writing code e.g. updating existing code to be compatible with a new SDK
Describe the solution you'd like
I would like to paste in my original code from version 1.x of my SDK and then get OpenCopilot to return me an altered version of that original code that will work with version 2.x of my SDK
Describe alternatives you've considered
I asked it to read a documentation web page and provide me with an updated version of the code. It said it can not directly access or browse the internet and that I should provide the OpenCopilot with some specific code samples instead.
Additional context
I tried to paste in raw Rust code but OpenCopilot chat input box was not able to determine what that was. I then used a codeblock (like you would use in Markdown e.g. code goes here
but that did not work).
How do you provide code to the chat bot?
No need to load the swagger file every time a new request comes in.
## TODO: Implement caching for the swagger file content (no need to load it everytime)
@app.route("/handle", methods=["POST", "OPTIONS"])
def handle() -> Response:
    """Handle an incoming copilot request.

    Parses the JSON request body, delegates the actual work to
    ``handle_request``, and returns its result serialized as JSON.

    Returns:
        A JSON response with the handler's result on success, or a JSON
        error payload with HTTP status 500 on failure.
    """
    data = request.get_json()
    try:
        response = handle_request(data)
        return jsonify(response)
    except Exception as e:
        # Report the failure with a 500 status code instead of a
        # misleading 200 so HTTP clients can detect the error.
        return jsonify({"error": str(e)}), 500
In:
llm-server/app.py:19
"An error occurred in the Server Components render. The specific message is omitted in production builds to avoid leaking sensitive details. A digest property is included on this error instance which may provide additional details about the nature of the error."
Allow users to provide external files (PDF, Texts) and use them with Copilot as knowledge sources. This feature will improve Copilot's ability to work with different data sources.
Hello!
This project is really amazing, but our company's server cannot access the external network; we can only call a locally deployed open-source large model. The local model I am currently using is ChatGLM3 — please support it!
ChatGLM is the best Chinese model and has a large number of users
Instead only support open AI for now, It would be nice to support Llama 2 and/or other LLMs.
Two PDF knowledge base files were uploaded, but they were never invoked. Why does the context not retrieve the relevant information from them?
the marketing website, the readme claim to be opensource however the license clearly states:
- The Licensee may not distribute, sublicense, sell, or resell the Software, in whole or in part, without explicit written permission from the Licensor.
the rest of the license file looks like it was lifted from MIT.
so which is it? MIT or something else?
After adding the /backend/pilot.js script to my website, $ becomes undefined — specifically, $.ui becomes undefined.
Enable a hot reload feature for local development (docker), allowing developers to see instant changes without restarting the application. This will significantly improve the development workflow and productivity.
Develop software development kits (SDKs) for the project to simplify integration and usage by external developers. SDKs provide a convenient way for developers to interact with the project's functionality in their preferred programming languages.
Outfit all Copilots with LangSmith to show you how they think. This helps you fine-tune your prompts for better results.
I successfully uploaded a pdf knowledge document, but did not invoke the knowledge base while using opencopilot.
Add a feature to let users see their Copilot interaction history. This will help you understand how people use Copilot.
Lenovo Thinkpad E590, 32GB RAM, plenty of diskspace. EndeavousOS (Arch distro).
The first error I encountered was:
38.70 The conflict is caused by:
38.70 The user requested langsmith==0.0.5
38.70 langchain 0.0.236 depends on langsmith<0.0.11 and >=0.0.10
So I changed the version for langsmith
It then seemed successful in its installation
until this part
Describe the solution you'd like
A clear and concise description of what you want to happen.
Hello,
The application has been deployed on my machine, and there were no errors, only a few warnings during deployment. I created a demo of a pet shop and tried to chat with OpenCopilot. When I asked general questions, such as "what is the highest mountain on Earth", the copilot answered quickly. But if I ask questions related to the pet shop, the response is an error message, "Error sending the message", shown below. And if I send a new message, there is no response. It seems one of the components doesn't work.
Is there any tips for this issue? Thanks a lot!
Environment:
Ubuntu 20.04.6 LTS
16G RAM
Docker 24.0.6
Is there a way to ingest an entire website, for example based on a site map file.
Or can you please tell me the API and point for submitting a single HTML page, and I can write the web crawler myself and pass each page into the system.
Is your feature request related to a problem? Please describe.
I know i'm jumping the gun here, but are there plans to support GraphQL?
Describe the solution you'd like
Put in the GraphQL introspection URL, and then index the docs from there.
Describe alternatives you've considered
converting graphql to swagger json file? idk if it'll be any good, every request would be a post request
Additional context
None
Describe the bug
access url http://localhost:8888/app
"An error occurred in the Server Components render. The specific message is omitted in production builds to avoid leaking sensitive details. A digest property is included on this error instance which may provide additional details about the nature of the error."
backend error
opencopilot-backend-1 |
opencopilot-backend-1 | INFO Server running on [http://0.0.0.0:5000].
opencopilot-backend-1 |
opencopilot-backend-1 | Press Ctrl+C to stop the server
opencopilot-backend-1 |
opencopilot-backend-1 | 2023-09-20 02:11:47 ................................................... ~ 0s
Failed to connect to localhost port 5000 after 5 ms: Couldn't connect to server
btw: Where is dashboard/.env.example?
cp -n dashboard/.env.example dashboard/.env 2>/dev/null || true
Is your feature request related to a problem? Please describe.
The current method of making API calls and preserving the application state may result in a linear increase in context as more objects are introduced. Our goal is to prevent this and ensure that the context remains consistent, regardless of the volume of data in the state.
Describe the solution you'd like
We propose utilizing a vector database to construct an entity graph, accompanied by an agent capable of rapidly crafting queries, traversing the graph, and delivering the desired outcomes.
Describe alternatives you've considered
There are no alternative approaches under consideration at this time.
Additional context
We plan to explore multiple graph database options, with the initial choice being intradb.
Currently, hasMultipleIntents
is using conditional statement to tell if the user request contain multi intents (require multiple API calls to get done).
Might be better to use OpenAI function to do that, something like:
Given these endpoints and their descriptions, please answer the question at the end:
getPets: to get all pets
createPet: to create a new pet
delete pet: delete a single pet
The question: "please delete the last pet"
The LLM should spit something like "yes" or any thing similar.
# llm-server/routes/root_service.py:76
# Dispatch to the multi-step workflow when the user's request appears to
# contain multiple intents (i.e. requires more than one API call).
try:
if hasMultipleIntents(text):
# NOTE(review): hasMultipleIntents is defined elsewhere; per the
# surrounding discussion it is a conditional heuristic — confirm.
return run_workflow(
WorkflowData(text, swagger_text, headers, server_base_url)
)
except Exception as e:
# Failures are printed and swallowed; control falls through to the
# caller's default handling.
print(e)
Styling issue:
When I add the React component for open-copilot, it messes up the styling of other components, and also adds borders and other unwanted styles to things.
Here is how it should look
and here how it looks with component
Aside from adding the component, I haven't changed anything else.
`
"use client";
import Widget from "../_components/copilot/copilot";
import Nav from "./navbar-ver";
import React from "react";
// Page layout: renders the vertical navbar, the routed page content,
// and mounts the Copilot chat widget alongside them.
export default function Layout({ children }: { children: React.ReactNode }) {
return (
<div className="relative flex ">
<Nav />
<div className="w-full"> {children}</div>
<Widget />
</div>
);
}
import { CopilotWidget, Root } from "@openchatai/copilot-widget"; // import the component
import "@openchatai/copilot-widget/index.css"; // the required styles
// Configuration passed to the copilot Root provider.
const options = {
apiUrl: "https://cloud.opencopilot.so/backend",
initialMessage: "How da?",
token: "fafaf",
triggerSelector: "#triggerSelector",
headers: {
// optional: you can pass your authentication tokens to the copilot or any other header you want to send with every request
Authorization: "Bearer your_auth_token_goes_here",
AnyKey: "AnyValue",
},
user: {
name: "Default User",
},
};
// Wraps CopilotWidget in its Root provider so it picks up `options` above.
// NOTE(review): the triggerSelector here ("#copilot-trigger") differs from
// options.triggerSelector ("#triggerSelector") — confirm which is intended.
export default function Widget() {
return (
<Root options={options}>
<CopilotWidget triggerSelector="#copilot-trigger" />
</Root>
);
}
I am also using tailwind with shadcn ui kit
can use azure openai?
Let Copilot handle complex flows, including making multiple API calls at once and passing data between them. This will take Copilot's capabilities to the next level.
Describe the bug
A clear and concise description of what the bug is.
To Reproduce
Steps to reproduce the behavior:
make install
ERROR: Couldn't find env file: OpenCopilot/llm-server/.env.docker
Expected behavior
Already performed the cp llm-server/.env.example to llm-server/.env
but it seems to also want an .env.docker
.
Screenshots
Desktop (please complete the following information):
Is there an example docker env file to use or do we just copy the .env.example
again?:
$ cp llm-server/.env.example llm-server/.env.docker
$ ls -la llm-server/
.env
.env.example
.env.docker
The link provided for downloading the Swagger file that is used for the Pet Store Demo is not functional. When attempting to access the link, it redirects to the current page instead of initiating the download of the Swagger file. This prevents users from obtaining the necessary Swagger file to follow the Pet Store Demo tutorial effectively.
This issue impacts the accessibility of important resources and hinders the learning experience for users who are trying to understand and utilize the Pet Store Demo. I kindly request the repository maintainers to investigate and rectify this broken link issue at your earliest convenience.
Thank you for your attention to this matter. I appreciate your efforts in maintaining the repository's documentation and resources for a seamless user experience.
Best regards,
Stefan Shipinkoski
Describe the bug
Hi. I'm consistently getting "Workflow Name" as "None" in the API response according to bot even though no concept like this exists in my API, although it looks like something that is the part of Open Copilot when checking the code.
Here is an example screenshot, although the same thing happens in different scenarios.
Here is the config for the widget:
<script src="http://localhost:8888/pilot.js"></script>
<script> // be aware to call this function when the document/window is ready.
// Configuration object consumed by initAiCoPilot (provided by pilot.js).
const options = {
apiUrl: "http://localhost:8888/backend", // your base url where your are hosting OpenCopilot at (the API), usually it's http://localhost:5000/api
// initialMessages: ["How are the things"], // optional: you can pass an array of messages that will be sent to the copilot when it's initialized
token: "MYTOKEN",
triggerSelector: "#triggerSelector", // the selector of the element that will trigger the copilot when clicked
headers: {
// optional: you can pass your authentication tokens to the copilot or any other header you want to send with every request
Authorization: "Bearer THIS IS SECRET",
},
user:{
name: "user"
}
}
// Initialize the copilot once the DOM is ready; initAiCoPilot is a global
// exposed by the pilot.js script loaded above.
window.addEventListener("DOMContentLoaded", ()=>initAiCoPilot(options)); // window.onload
</script>
<span id="triggerSelector">Click me</span>
What's the source of the issue?
Enhance the complex flows feature by introducing a user-friendly UI for creating workflows. This will simplify the process of building complex sequences in Copilot.
Describe the bug
A reoccuring error after make install
or make restart
mysql_1 | exec /usr/local/bin/docker-entrypoint.sh: exec format error
To Reproduce
Steps to reproduce the behavior:
OPENAI_API_KEY
to that llm-server/.env fileExpected behavior
To see localhost:8888 working
Desktop (please complete the following information):
create new flow error:
The front UI did not report an error, but did not generate the corresponding flow. Background logs are as follows:
mongodb | {"t":{"$date":"2023-12-14T03:07:30.004+00:00"},"s":"I", "c":"NETWORK", "id":22943, "ctx":"listener","msg":"Connection accepted","attr":{"remote":"172.21.0.9:52410","uuid":{"uuid":{"$uuid":"037287f1-0212-418b-b6cb-94b6b4006bff"}},"connectionId":9,"connectionCount":9}}
mongodb | {"t":{"$date":"2023-12-14T03:07:30.007+00:00"},"s":"I", "c":"NETWORK", "id":51800, "ctx":"conn9","msg":"client metadata","attr":{"remote":"172.21.0.9:52410","client":"conn9","doc":{"driver":{"name":"PyMongo","version":"4.5.0"},"os":{"type":"Linux","name":"Linux","architecture":"aarch64","version":"6.4.16-linuxkit"},"platform":"CPython 3.9.18.final.0"}}}
mongodb | {"t":{"$date":"2023-12-14T03:07:30.012+00:00"},"s":"I", "c":"ACCESS", "id":6788604, "ctx":"conn9","msg":"Auth metrics report","attr":{"metric":"acquireUser","micros":0}}
mongodb | {"t":{"$date":"2023-12-14T03:07:30.023+00:00"},"s":"I", "c":"ACCESS", "id":5286306, "ctx":"conn9","msg":"Successfully authenticated","attr":{"client":"172.21.0.9:52410","isSpeculative":true,"isClusterMember":false,"mechanism":"SCRAM-SHA-256","user":"dbuser","db":"admin","result":0,"metrics":{"conversation_duration":{"micros":10364,"summary":{"0":{"step":1,"step_total":2,"duration_micros":153},"1":{"step":2,"step_total":2,"duration_micros":40}}}},"extraInfo":{}}}
mongodb | {"t":{"$date":"2023-12-14T03:07:30.025+00:00"},"s":"I", "c":"NETWORK", "id":6788700, "ctx":"conn9","msg":"Received first command on ingress connection since session start or auth handshake","attr":{"elapsedMillis":2}}
opencopilot-llm-server-1 | 172.21.0.12 - - [14/Dec/2023 03:07:35] "POST /backend/flows/b/56a24e86-ade4-4b47-a626-9f9435b3654c HTTP/1.0" 201 -
opencopilot-nginx-1 | 192.168.65.1 - - [14/Dec/2023:03:07:35 +0000] "POST /backend/flows/b/56a24e86-ade4-4b47-a626-9f9435b3654c HTTP/1.1" 201 59 "http://localhost:8888/copilot/56a24e86-ade4-4b47-a626-9f9435b3654c/flows" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36"
opencopilot-llm-server-1 | 172.21.0.12 - - [14/Dec/2023 03:07:35] "GET /backend/flows/get/b/56a24e86-ade4-4b47-a626-9f9435b3654c?page=1 HTTP/1.0" 200 -
opencopilot-nginx-1 | 192.168.65.1 - - [14/Dec/2023:03:07:35 +0000] "GET /backend/flows/get/b/56a24e86-ade4-4b47-a626-9f9435b3654c?page=1 HTTP/1.1" 200 61 "http://localhost:8888/copilot/56a24e86-ade4-4b47-a626-9f9435b3654c/flows" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36"
opencopilot-llm-server-1 | 172.21.0.9 - - [14/Dec/2023 03:07:39] "GET /healthcheck HTTP/1.1" 200 -
Make Copilot always ask for your confirmation before doing critical actions, especially when it involves data deletion or other risky operations.
Describe the bug
Kubernetes pods fail to start, multiple pods crashing
To Reproduce
Steps to reproduce the behavior:
kubectl apply -f
Expected behavior
All pods should be running
Kubernetes
Enhance Copilot by providing the capability to update, display, and delete its Swagger file. This feature will give users more control over the Swagger file associated with their Copilot instance.
So how do I run the install from a conda windows terminal?
Describe the bug
I followed https://docs.opencopilot.so/api-reference/endpoint/workflow/create_workflow to create the workflow on my Linux OpenCopilot deployment, but I get an error message — the result is a 404.
There is the result :
curl --location 'http://localhost:8002/workflow/' --header 'Content-Type: application/json' --data @test.json
<!doctype html>
The requested URL was not found on the server. If you entered the URL manually please check your spelling and try again.
Question
Add the ability for Copilot to remember conversation context. This feature will let users ask for changes to previous requests more easily.
Create an interactive UI to manage backend APIs. You'll be able to add, delete, and tweak APIs easily. Swagger files will still be supported, but not as prominently.
Github: https://github.com/cpacker/MemGPT
Video: https://www.youtube.com/watch?v=QQ2QOPWZKVc
The problem it solves - always stays in context based on stored memory.
I believe this is a critical feature for any LLM-based assistant/copilot.
Is your feature request related to a problem? Please describe.
Many times users are unable to type, or it is not convenient to do so. Asking the copilot to execute tasks using voice would help a lot.
Describe the solution you'd like
Using voice instead of typing to ask the copilot to execute tasks. There are situations where typing is inconvenient if not prohibitive. Having a voice interface would make copilot usable in such situations. Adding voice to text is also not a big change in the copilot itself, just additional layer to enter commands.
Describe alternatives you've considered
Additional context
A declarative, efficient, and flexible JavaScript library for building user interfaces.
🖖 Vue.js is a progressive, incrementally-adoptable JavaScript framework for building UI on the web.
TypeScript is a superset of JavaScript that compiles to clean JavaScript output.
An Open Source Machine Learning Framework for Everyone
The Web framework for perfectionists with deadlines.
A PHP framework for web artisans
Bring data to life with SVG, Canvas and HTML. 📊📈🎉
JavaScript (JS) is a lightweight interpreted programming language with first-class functions.
Some thing interesting about web. New door for the world.
A server is a program made to process requests and deliver data to clients.
Machine learning is a way of modeling and interpreting data that allows a piece of software to respond intelligently.
Some thing interesting about visualization, use data art
Some thing interesting about game, make everyone happy.
We are working to build community through open source technology. NB: members must have two-factor auth.
Open source projects and samples from Microsoft.
Google ❤️ Open Source for everyone.
Alibaba Open Source for everyone
Data-Driven Documents codes.
China tencent open source team.