From 3fcc9a41050df625abbfe8f6e6a7cd22cdb1f6b1 Mon Sep 17 00:00:00 2001 From: imoxto Date: Sun, 19 Mar 2023 23:45:05 +0600 Subject: [PATCH 1/2] Migrate Prompter, OpenAI and config files to TS --- config/summarization.js | 17 ------- examples/binaryClassificationExample.js | 6 +-- examples/nerExample.js | 6 +-- examples/sqlExample.js | 6 +-- examples/summaryExample.js | 16 +++--- examples/tabularExample.js | 6 +-- examples/turboExample.js | 2 +- package-lock.json | 35 ++++++++++--- package.json | 8 ++- .../config/binaryClassification.ts | 5 ++ config/ner.js => src/config/ner.ts | 6 +++ config/sql.js => src/config/sql.ts | 6 +++ src/config/summarization.ts | 25 +++++++++ config/tabular.js => src/config/tabular.ts | 6 ++- models/openai.js => src/models/openai.ts | 2 +- promptify/index.js => src/promptify/index.ts | 51 ++++++++++++++----- tsconfig.json | 33 ++++++++++++ 17 files changed, 175 insertions(+), 61 deletions(-) delete mode 100644 config/summarization.js rename config/binaryClassification.js => src/config/binaryClassification.ts (87%) rename config/ner.js => src/config/ner.ts (89%) rename config/sql.js => src/config/sql.ts (88%) create mode 100644 src/config/summarization.ts rename config/tabular.js => src/config/tabular.ts (81%) rename models/openai.js => src/models/openai.ts (89%) rename promptify/index.js => src/promptify/index.ts (51%) create mode 100644 tsconfig.json diff --git a/config/summarization.js b/config/summarization.js deleted file mode 100644 index 77a03d4..0000000 --- a/config/summarization.js +++ /dev/null @@ -1,17 +0,0 @@ -export const summarization = ({ description = '', examples = [], context = '' }) => { - if (!context) throw new Error('context is required'); - - const _example = - examples && examples.length > 0 - ? `Examples: ${examples.map( - (ex) => `Input ${ex.context} Output [${ex.extracted_ans}]` - )}` - : ''; - return ` - ${description} - You are a highly intelligent and accurate passage summarizing bot. You take the above passage as input and return the summary from the Paragraph. Your output format is only {{ output_format|default("{'Summary' : Extracted Answer}") }} form, no other form. 
- ${_example} - Context: ${context} - Output: - `; -}; diff --git a/examples/binaryClassificationExample.js b/examples/binaryClassificationExample.js index 05135ad..8ba43a9 100644 --- a/examples/binaryClassificationExample.js +++ b/examples/binaryClassificationExample.js @@ -1,6 +1,6 @@ -import { binaryClassification } from "../config/binaryClassification.js"; -import { OpenAI } from "../models/openai.js"; -import { Prompter } from "../promptify/index.js"; +import { binaryClassification } from "../dist/config/binaryClassification.js"; +import { OpenAI } from "../dist/models/openai.js"; +import { Prompter } from "../dist/promptify/index.js"; import { binaryData } from "../examples/data/binary.js"; const model = OpenAI("api-key"); diff --git a/examples/nerExample.js b/examples/nerExample.js index e0af3da..829b752 100644 --- a/examples/nerExample.js +++ b/examples/nerExample.js @@ -1,6 +1,6 @@ -import { ner } from "../config/ner.js"; -import { OpenAI } from "../models/openai.js"; -import { Prompter } from "../promptify/index.js"; +import { ner } from "../dist/config/ner.js"; +import { OpenAI } from "../dist/models/openai.js"; +import { Prompter } from "../dist/promptify/index.js"; import { nerData } from "../examples/data/optimized_ner.js"; const model = OpenAI("api-key"); diff --git a/examples/sqlExample.js b/examples/sqlExample.js index cf3967f..5ac7126 100644 --- a/examples/sqlExample.js +++ b/examples/sqlExample.js @@ -1,6 +1,6 @@ -import { sql } from "../config/sql.js"; -import { OpenAI } from "../models/openai.js"; -import { Prompter } from "../promptify/index.js"; +import { sql } from "../dist/config/sql.js"; +import { OpenAI } from "../dist/models/openai.js"; +import { Prompter } from "../dist/promptify/index.js"; import { sqlData } from "../examples/data/sql.js"; const model = OpenAI(""); diff --git a/examples/summaryExample.js b/examples/summaryExample.js index a0dbc29..b4085cb 100644 --- a/examples/summaryExample.js +++ b/examples/summaryExample.js @@ -1,15 +1,15 @@ -import { summarization } from '../config/summarization.js'; -import { summaryData } from '../examples/data/summary.js'; -import { OpenAI } from '../models/openai.js'; -import { Prompter } from '../promptify/index.js'; +import { summarization } from "../dist/config/summarization.js"; +import { summaryData } from "../examples/data/summary.js"; +import { OpenAI } from "../dist/models/openai.js"; +import { Prompter } from "../dist/promptify/index.js"; -const model = OpenAI('api-key'); +const model = OpenAI("api-key"); const prompt = summarization({ - examples: summaryData, - context: `Google Chrome is a cross-platform web browser developed by Google. It was first released in 2008 for Microsoft Windows, built with free software components from Apple WebKit and Mozilla Firefox. Versions were later released for Linux, macOS, iOS, and also for Android, where it is the default browser. The browser is also the main component of ChromeOS, where it serves as the platform for web applications. Most of Chrome's source code comes from Google's free and open-source software project Chromium, but Chrome is licensed as proprietary freeware. WebKit was the original rendering engine, but Google eventually forked it to create the Blink engine; all Chrome variants except iOS now use Blink. 
As of October 2022, StatCounter estimates that Chrome has a 67% worldwide browser market share (after peaking at 72.38% in November 2018) on personal computers (PC), is most used on tablets (having surpassed Safari), and is also dominant on smartphones and at 65% across all platforms combined. Because of this success, Google has expanded the "Chrome" brand name to other products: ChromeOS, Chromecast, Chromebook, Chromebit, Chromebox, and Chromebase.`, + examples: summaryData, + context: `Google Chrome is a cross-platform web browser developed by Google. It was first released in 2008 for Microsoft Windows, built with free software components from Apple WebKit and Mozilla Firefox. Versions were later released for Linux, macOS, iOS, and also for Android, where it is the default browser. The browser is also the main component of ChromeOS, where it serves as the platform for web applications. Most of Chrome's source code comes from Google's free and open-source software project Chromium, but Chrome is licensed as proprietary freeware. WebKit was the original rendering engine, but Google eventually forked it to create the Blink engine; all Chrome variants except iOS now use Blink. As of October 2022, StatCounter estimates that Chrome has a 67% worldwide browser market share (after peaking at 72.38% in November 2018) on personal computers (PC), is most used on tablets (having surpassed Safari), and is also dominant on smartphones and at 65% across all platforms combined. Because of this success, Google has expanded the "Chrome" brand name to other products: ChromeOS, Chromecast, Chromebook, Chromebit, Chromebox, and Chromebase.`, }); -const result = await Prompter(model, prompt, 'text-davinci-003'); +const result = await Prompter(model, prompt, "text-davinci-003"); console.log(result); diff --git a/examples/tabularExample.js b/examples/tabularExample.js index 80e4df5..547e029 100644 --- a/examples/tabularExample.js +++ b/examples/tabularExample.js @@ -1,6 +1,6 @@ -import { tabular } from "../config/tabular.js"; -import { OpenAI } from "../models/openai.js"; -import { Prompter } from "../promptify/index.js"; +import { tabular } from "../dist/config/tabular.js"; +import { OpenAI } from "../dist/models/openai.js"; +import { Prompter } from "../dist/promptify/index.js"; import { tabularData } from "../examples/data/tabular.js"; const model = OpenAI("api-key"); diff --git a/examples/turboExample.js b/examples/turboExample.js index 62e97fb..153fc08 100644 --- a/examples/turboExample.js +++ b/examples/turboExample.js @@ -1,4 +1,4 @@ -import { Prompter } from "../promptify/index.js"; +import { Prompter } from "../dist/promptify/index.js"; const prompt = [ { role: "system", content: "you are an helpful assistat" }, diff --git a/package-lock.json b/package-lock.json index 20b1f03..99fc684 100644 --- a/package-lock.json +++ b/package-lock.json @@ -17,12 +17,16 @@ "helmet": "^6.0.1", "mongoose": "^7.0.0", "openai": "^3.1.0" + }, + "devDependencies": { + "@types/node": "^18.15.3", + "typescript": "^5.0.2" } }, "node_modules/@types/node": { - "version": "18.14.2", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.14.2.tgz", - "integrity": "sha512-1uEQxww3DaghA0RxqHx0O0ppVlo43pJhepY51OxuQIKHpjbnYLA7vcdwioNPzIqmC2u3I/dmylcqjlh0e7AyUA==" + "version": "18.15.3", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.3.tgz", + "integrity": "sha512-p6ua9zBxz5otCmbpb5D3U4B5Nanw6Pk3PPyX05xnxbB/fRv71N7CPmORg7uAD5P70T0xmx1pzAx/FUfa5X+3cw==" }, "node_modules/@types/webidl-conversions": { 
"version": "7.0.0", @@ -952,6 +956,19 @@ "node": ">= 0.6" } }, + "node_modules/typescript": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.0.2.tgz", + "integrity": "sha512-wVORMBGO/FAs/++blGNeAVdbNKtIh1rbBL2EyQ1+J9lClJ93KiiKe8PmFIVdXhHcyv44SL9oglmfeSsndo0jRw==", + "dev": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=12.20" + } + }, "node_modules/unpipe": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", @@ -999,9 +1016,9 @@ }, "dependencies": { "@types/node": { - "version": "18.14.2", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.14.2.tgz", - "integrity": "sha512-1uEQxww3DaghA0RxqHx0O0ppVlo43pJhepY51OxuQIKHpjbnYLA7vcdwioNPzIqmC2u3I/dmylcqjlh0e7AyUA==" + "version": "18.15.3", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.15.3.tgz", + "integrity": "sha512-p6ua9zBxz5otCmbpb5D3U4B5Nanw6Pk3PPyX05xnxbB/fRv71N7CPmORg7uAD5P70T0xmx1pzAx/FUfa5X+3cw==" }, "@types/webidl-conversions": { "version": "7.0.0", @@ -1689,6 +1706,12 @@ "mime-types": "~2.1.24" } }, + "typescript": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.0.2.tgz", + "integrity": "sha512-wVORMBGO/FAs/++blGNeAVdbNKtIh1rbBL2EyQ1+J9lClJ93KiiKe8PmFIVdXhHcyv44SL9oglmfeSsndo0jRw==", + "dev": true + }, "unpipe": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", diff --git a/package.json b/package.json index 3444d09..a679a08 100644 --- a/package.json +++ b/package.json @@ -2,12 +2,14 @@ "name": "promptifyjs", "version": "0.0.1", "description": "Prompt Engineering, Solve NLP Problems with LLM's & Easily generate different NLP Task prompts for popular generative models like GPT, PaLM, and more with Promptify", - "main": "index.js", + "main": "dist/index.js", "type": "module", "directories": { "example": "examples" }, "scripts": { + "dev": "tsc -w", + "build": "tsc --sourceMap false", "test": "echo \"Error: no test specified\" && exit 1" }, "repository": { @@ -32,5 +34,9 @@ "helmet": "^6.0.1", "mongoose": "^7.0.0", "openai": "^3.1.0" + }, + "devDependencies": { + "@types/node": "^18.15.3", + "typescript": "^5.0.2" } } diff --git a/config/binaryClassification.js b/src/config/binaryClassification.ts similarity index 87% rename from config/binaryClassification.js rename to src/config/binaryClassification.ts index eafad7d..16786eb 100644 --- a/config/binaryClassification.js +++ b/src/config/binaryClassification.ts @@ -3,6 +3,11 @@ export const binaryClassification = ({ description = "", labels = "", examples = [], +}: { + text_input?: string; + description?: string; + labels?: string; + examples?: Array<{ text: string; labels: string }>; }) => { if (!text_input) throw new Error("input is required"); diff --git a/config/ner.js b/src/config/ner.ts similarity index 89% rename from config/ner.js rename to src/config/ner.ts index c424e68..8bfb6b6 100644 --- a/config/ner.js +++ b/src/config/ner.ts @@ -4,6 +4,12 @@ export const ner = ({ domain = "", labels = "", examples = [], +}: { + text_input?: string; + description?: string; + domain?: string; + labels?: string; + examples?: Array<{ sentence: string }>; }) => { if (!text_input) throw new Error("input is required"); diff --git a/config/sql.js b/src/config/sql.ts similarity index 88% rename from config/sql.js rename to src/config/sql.ts index 5ecf8df..2b01e62 100644 --- a/config/sql.js +++ b/src/config/sql.ts @@ -4,6 +4,12 @@ export const sql = ({ 
example_schema = {}, target_schema = {}, examples = [], +}: { + text_input?: string; + description?: string; + example_schema?: any; + target_schema?: any; + examples?: { sentence: string; query: string }[]; }) => { if (!text_input) throw new Error("input is required"); if (!example_schema) throw new Error("example_schema is required"); diff --git a/src/config/summarization.ts b/src/config/summarization.ts new file mode 100644 index 0000000..127e0e0 --- /dev/null +++ b/src/config/summarization.ts @@ -0,0 +1,25 @@ +export const summarization = ({ + description = "", + examples = [], + context = "", +}: { + description?: string; + examples?: { context: string; extracted_ans: string }[]; + context?: string; +}) => { + if (!context) throw new Error("context is required"); + + const _example = + examples && examples.length > 0 + ? `Examples: ${examples.map( + (ex) => `Input ${ex.context} Output [${ex.extracted_ans}]` + )}` + : ""; + return ` + ${description} + You are a highly intelligent and accurate passage summarizing bot. You take the above passage as input and return the summary from the Paragraph. Your output format is only {{ output_format|default("{'Summary' : Extracted Answer}") }} form, no other form. + ${_example} + Context: ${context} + Output: + `; +}; diff --git a/config/tabular.js b/src/config/tabular.ts similarity index 81% rename from config/tabular.js rename to src/config/tabular.ts index 9c6d0f3..40729ca 100644 --- a/config/tabular.js +++ b/src/config/tabular.ts @@ -2,6 +2,10 @@ export const tabular = ({ text_input = "", description = "", examples = [], +}: { + text_input?: string | { text: string }; + description?: string; + examples?: { text: string; tabulardata: string }[]; }) => { if (!text_input) throw new Error("input is required"); if (!description) throw new Error("description is required"); @@ -22,7 +26,7 @@ export const tabular = ({ ${_example} Target Input Below: - ${text_input.text} + ${typeof text_input === "string" ? 
text_input : text_input.text} Output for the target input: `; }; diff --git a/models/openai.js b/src/models/openai.ts similarity index 89% rename from models/openai.js rename to src/models/openai.ts index 6a264e8..dfd9381 100644 --- a/models/openai.js +++ b/src/models/openai.ts @@ -1,7 +1,7 @@ // const { Configuration, OpenAI } = require("openai"); import { Configuration, OpenAIApi } from "openai"; -export const OpenAI = (key) => { +export const OpenAI = (key: string) => { if (!key) throw new Error("Open AI key is required"); // Logic to validate `apiKey` diff --git a/promptify/index.js b/src/promptify/index.ts similarity index 51% rename from promptify/index.js rename to src/promptify/index.ts index 74e34db..6a5975c 100644 --- a/promptify/index.js +++ b/src/promptify/index.ts @@ -1,19 +1,42 @@ import axios from "axios"; +import { OpenAIApi } from "openai"; + +const supportedModels = [ + "text-davinci-003", + "text-curie-001", + "text-babbage-001", + "text-ada-001", + "gpt-3.5-turbo", //requires another implementation, can't be used like the other text completion models +] as const; + +export type SupportedModels = typeof supportedModels[number]; + +export type TurboData = { + id: string; + object: "chat.completion"; + created: Date; + model: "gpt-3.5-turbo-0301"; + usage: { + prompt_tokens: number; + completion_tokens: number; + total_tokens: number; + }; + choices: { + message: { + role: "assistant"; + content: string; + }; + finish_reason: "stop"; + index: number; + }[]; +}; export const Prompter = async ( - model, - prompt, - modelType = "text-davinci-003", - apiKey + model: OpenAIApi | undefined, + prompt: string, + modelType: SupportedModels = "text-davinci-003", + apiKey: string ) => { - const supportedModels = [ - "text-davinci-003", - "text-curie-001", - "text-babbage-001", - "text-ada-001", - "gpt-3.5-turbo", //requires another implementation, can't be used like the other text completion models - ]; - if (!supportedModels.includes(modelType)) throw new Error("Model not supported"); if (!modelType) throw new Error("model is required"); //logic for incorrect model @@ -21,7 +44,7 @@ export const Prompter = async ( if (!prompt) throw new Error("incorrect prompt"); //call API to render the prompt ? 
- if (modelType !== "gpt-3.5-turbo") { + if (model && modelType !== "gpt-3.5-turbo") { const completion = await model.createCompletion({ model: modelType, prompt: prompt, @@ -32,7 +55,7 @@ export const Prompter = async ( } else { const completionURL = "https://api.openai.com/v1/chat/completions"; - const response = await axios.post( + const response = await axios.post( completionURL, { model: modelType, diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 0000000..f7161a9 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,33 @@ +{ + "compilerOptions": { + "target": "ESNext", + "module": "ESNext", + "lib": ["ESNext"], + "strict": false, + "skipLibCheck": true, + "sourceMap": true, + "declaration": true, + "moduleResolution": "node", + "noImplicitAny": true, + "strictNullChecks": true, + "strictFunctionTypes": true, + "noImplicitThis": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "noImplicitReturns": true, + "noFallthroughCasesInSwitch": true, + "allowSyntheticDefaultImports": true, + "esModuleInterop": true, + "emitDecoratorMetadata": true, + "experimentalDecorators": true, + "resolveJsonModule": true, + "incremental": false, + "removeComments": true, + "outDir": "./dist", + "rootDir": "./src", + "baseUrl": "./src", + "watch": false, + "types": ["node"] + }, + "include": ["./src/**/*.ts"] +} From ad07f0f6f70f792182c91fa512e021fda30e3be9 Mon Sep 17 00:00:00 2001 From: imoxto Date: Mon, 20 Mar 2023 12:10:05 +0600 Subject: [PATCH 2/2] add relation extraction type --- examples/relationExtractionExample.js | 19 ++++++++++++++++++ src/config/relationExtraction.ts | 28 +++++++++++++++++++++++++++ 2 files changed, 47 insertions(+) create mode 100644 examples/relationExtractionExample.js create mode 100644 src/config/relationExtraction.ts diff --git a/examples/relationExtractionExample.js b/examples/relationExtractionExample.js new file mode 100644 index 0000000..b662705 --- /dev/null +++ b/examples/relationExtractionExample.js @@ -0,0 +1,19 @@ +import { relationExtraction } from "../dist/config/relationExtraction.js"; +import { OpenAI } from "../dist/models/openai.js"; +import { Prompter } from "../dist/promptify/index.js"; +import { relationExtractionData } from "../examples/data/relation_extraction.js"; + +const model = OpenAI("api key"); +const examples = relationExtractionData; + +const prompt = relationExtraction({ + text_input: + "Eren is a Person. Working for XYZ company, his job title is Software Engineer. He lives in Bielefeld, Germany", + description: "Relation Extraction", + domain: "Personal Information", + examples: examples, +}); + +const result = await Prompter(model, prompt, "text-davinci-003"); + +console.log(result); diff --git a/src/config/relationExtraction.ts b/src/config/relationExtraction.ts new file mode 100644 index 0000000..95adce9 --- /dev/null +++ b/src/config/relationExtraction.ts @@ -0,0 +1,28 @@ +export const relationExtraction = ({ + text_input = "", + description = "", + domain = "", + examples = [], +}: { + text_input?: string, + description?: string, + domain?: string, + examples?: { text: string, labels: string }[], +}) => { + const _example = + examples && examples.length > 0 + ? `Examples: ${examples.map( + (ex) => `Input ${ex.text} Output [${ex.labels}] \n` + )}` + : ""; + + return ` + ${description} + You are a highly intelligent and accurate ${domain} domain Resource Description Framework (RDF) data model. You take Passage as input and convert it into ${domain} domain RDF triples. 
A triple is a set of three entities that codifies a statement about semantic data in the form of subject–predicate–object expressions.
+    Your output format is only {{ output_format|default("[[ subject, predicate, object ], ...]") }} nothing else
+    ${_example}
+    Input: ${text_input}
+    Output:
+  `;
+};
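
For reference, a minimal consumer sketch of the API these two patches introduce, written in TypeScript against the compiled output in dist/. It is a sketch, not part of the diff: it assumes npm run build has been run (so dist/ holds the emitted JS plus the generated .d.ts files, since declaration is enabled) and uses a placeholder key and passage. The empty fourth argument mirrors the existing examples: apiKey is only consulted on the gpt-3.5-turbo chat path, which bypasses the OpenAIApi client and posts to the chat/completions endpoint through axios.

import { summarization } from "./dist/config/summarization.js";
import { OpenAI } from "./dist/models/openai.js";
import { Prompter, SupportedModels } from "./dist/promptify/index.js";

// Placeholder key: OpenAI() only validates that a key was supplied.
const model = OpenAI("api-key");

// Only `context` is required by the summarization template.
const prompt = summarization({
  context: "Node.js is a cross-platform JavaScript runtime built on the V8 engine.",
});

// The SupportedModels union narrows the model name to the ones Prompter accepts.
const modelType: SupportedModels = "text-davinci-003";

// Completion models go through the OpenAIApi client; for "gpt-3.5-turbo" the
// first argument is ignored and the key would instead be passed as the fourth argument.
const result = await Prompter(model, prompt, modelType, "");
console.log(result);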