add the backend api #10

Open
wants to merge 31 commits into main
Commits (31)
4c4fea2
add the backend api
maloong2022 Sep 8, 2023
191c9f8
style: Format code with prettier and standardjs
deepsource-autofix[bot] Sep 8, 2023
1f740c0
Update openai.ts
maloong2022 Sep 8, 2023
69c5edc
Merge branch 'backend-development' of https://github.com/The-Creative…
maloong2022 Sep 8, 2023
f978e86
style: Format code with prettier and standardjs
deepsource-autofix[bot] Sep 8, 2023
d8a54fe
fix the bug about pnpm build and add the messages function
maloong2022 Sep 8, 2023
bbc8d85
Merge branch 'backend-development' of https://github.com/The-Creative…
maloong2022 Sep 8, 2023
09fdc69
style: Format code with prettier and standardjs
deepsource-autofix[bot] Sep 8, 2023
bce263a
Merge remote-tracking branch 'origin/main' into backend-development
FleetAdmiralJakob Sep 8, 2023
fa444d6
Updated pnpm-lock
FleetAdmiralJakob Sep 8, 2023
4c9373e
style: Format code with prettier and standardjs
deepsource-autofix[bot] Sep 8, 2023
77d1b87
fixed the TODO
maloong2022 Sep 8, 2023
536f2cc
Merge branch 'backend-development' of https://github.com/The-Creative…
maloong2022 Sep 8, 2023
5b90e7d
style: Format code with prettier and standardjs
deepsource-autofix[bot] Sep 8, 2023
c548376
Fixed a type error
FleetAdmiralJakob Sep 8, 2023
1377967
Fixed some typos and changed function names
FleetAdmiralJakob Sep 8, 2023
5bbe439
style: Format code with prettier and standardjs
deepsource-autofix[bot] Sep 8, 2023
4c4bdd3
Added an idea
FleetAdmiralJakob Sep 8, 2023
7f0fa63
Added an idea
FleetAdmiralJakob Sep 9, 2023
93edddc
Added a question
FleetAdmiralJakob Sep 9, 2023
abcc79d
style: Format code with prettier and standardjs
deepsource-autofix[bot] Sep 9, 2023
f92b2f8
Fixed some problems that Federico mentioned
FleetAdmiralJakob Sep 10, 2023
17dfa66
style: Format code with prettier and standardjs
deepsource-autofix[bot] Sep 10, 2023
4cddb22
Merge branch 'main' into backend-development
FleetAdmiralJakob Sep 10, 2023
80c713a
Added the API Key Examples
FleetAdmiralJakob Sep 10, 2023
e6e14c3
Updated the error messages of the server
FleetAdmiralJakob Sep 10, 2023
6dba50b
style: Format code with prettier and standardjs
deepsource-autofix[bot] Sep 10, 2023
a6895fa
Removed the OPENAI API Key from the .env.example
FleetAdmiralJakob Sep 10, 2023
33e910b
Merge remote-tracking branch 'origin/backend-development' into backen…
FleetAdmiralJakob Sep 10, 2023
349068b
Changed the logic for the checking if a room is full
FleetAdmiralJakob Sep 10, 2023
3190c66
Merge branch 'main' into backend-development
FleetAdmiralJakob Sep 17, 2023
5 changes: 5 additions & 0 deletions .env.example
@@ -12,3 +12,8 @@
# Example:
# SERVERVAR="foo"
# NEXT_PUBLIC_CLIENTVAR="bar"

# Deployment used by `npx convex dev`
CONVEX_DEPLOYMENT="..." # team: creative-programmers, project: sketchit-challenge

NEXT_PUBLIC_CONVEX_URL="https://..."
11 changes: 10 additions & 1 deletion convex/_generated/api.d.ts
@@ -14,6 +14,10 @@ import type {
FilterApi,
FunctionReference,
} from "convex/server";
import type * as messages from "../messages";
import type * as openai from "../openai";
import type * as player from "../player";
import type * as rooms from "../rooms";

/**
* A utility for referencing Convex functions in your app's API.
@@ -23,7 +27,12 @@ import type {
* const myFunctionReference = api.myModule.myFunction;
* ```
*/
declare const fullApi: ApiFromModules<{}>;
declare const fullApi: ApiFromModules<{
messages: typeof messages;
openai: typeof openai;
player: typeof player;
rooms: typeof rooms;
}>;
export declare const api: FilterApi<
typeof fullApi,
FunctionReference<any, "public">
31 changes: 14 additions & 17 deletions convex/_generated/dataModel.d.ts
@@ -9,29 +9,25 @@
* @module
*/

import { AnyDataModel } from "convex/server";
import type { DataModelFromSchemaDefinition } from "convex/server";
import type { DocumentByName, TableNamesInDataModel } from "convex/server";
import type { GenericId } from "convex/values";

/**
* No `schema.ts` file found!
*
* This generated code has permissive types like `Doc = any` because
* Convex doesn't know your schema. If you'd like more type safety, see
* https://docs.convex.dev/using/schemas for instructions on how to add a
* schema file.
*
* After you change a schema, rerun codegen with `npx convex dev`.
*/
import schema from "../schema";

/**
* The names of all of your Convex tables.
*/
export type TableNames = string;
export type TableNames = TableNamesInDataModel<DataModel>;

/**
* The type of a document stored in Convex.
*
* @typeParam TableName - A string literal type of the table name (like "users").
*/
export type Doc = any;
export type Doc<TableName extends TableNames> = DocumentByName<
DataModel,
TableName
>;

/**
* An identifier for a document in Convex.
@@ -43,9 +39,10 @@ export type Doc = any;
*
* IDs are just strings at runtime, but this type can be used to distinguish them from other
* strings when type checking.
*
* @typeParam TableName - A string literal type of the table name (like "users").
*/
export type Id<TableName extends TableNames = TableNames> =
GenericId<TableName>;
export type Id<TableName extends TableNames> = GenericId<TableName>;

/**
* A type describing your Convex data model.
@@ -56,4 +53,4 @@ export type Id<TableName extends TableNames = TableNames> =
* This type is used to parameterize methods like `queryGeneric` and
* `mutationGeneric` to make them type-safe.
*/
export type DataModel = AnyDataModel;
export type DataModel = DataModelFromSchemaDefinition<typeof schema>;
42 changes: 42 additions & 0 deletions convex/messages.ts
@@ -0,0 +1,42 @@
import { query, mutation } from "./_generated/server";
import { v } from "convex/values";

export const list = query({
args: { roomId: v.id("rooms") },
handler: async (ctx, args) => {
// Grab the most recent messages.
const messages = await ctx.db
.query("messages")
.filter((q) => q.eq(q.field("roomId"), args.roomId))
.order("desc")
.collect();
// Attach each sender's username to their message.
const messageWithUsername = await Promise.all(
messages.map(async (message) => {
// Look up the player who sent this message
const player = await ctx.db.get(message.playerId);
// Join the player's username with the message data
return {
...message,
username: player?.username,
};
}),
);
// Reverse the list so that it's in chronological order.
return messageWithUsername.reverse();
},
});

// player sends a message in a room
export const send = mutation({
args: { body: v.string(), playerId: v.id("player"), roomId: v.id("rooms") },
handler: async (ctx, args) => {
// Send a new message.
await ctx.db.insert("messages", {
body: args.body,
playerId: args.playerId,
roomId: args.roomId,
});
},
});
72 changes: 72 additions & 0 deletions convex/openai.ts
@@ -0,0 +1,72 @@
"use node";

import { action } from "./_generated/server";
import { v } from "convex/values";
import { api } from "./_generated/api";
import OpenAI from "openai";

// Initialize the OpenAI client with the given API key
const openai = new OpenAI({
apiKey: process.env.OPENAI_API_KEY, // This is also the default, can be omitted
});
// start a game
export const startGames = action({
Member:

Add a check that the user who sends the request to start the game is actually the creator of the room.
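
A rough sketch of what that check could look like (hypothetical: it assumes a creatorId field on rooms, a rooms.getRoom query, and a playerId argument on this action, none of which exist in this PR yet):

// Hypothetical sketch -- assumes `creatorId` on rooms, a `rooms.getRoom` query,
// and a `playerId` argument on this action; none of these exist in this PR yet.
const room = await ctx.runQuery(api.rooms.getRoom, { roomId: args.roomId });
if (!room || room.creatorId !== args.playerId) {
  return { message: "The backend got an error: Only the room creator can start the game!" };
}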

args: {
topic: v.string(),
numberOfPlayer: v.number(),
roomId: v.id("rooms"),
},
handler: async (ctx, args) => {
try {
const checkTopicResponse = await openai.completions.create({
model: "text-davinci-003",
prompt: `Is the topic ${args.topic} good?
A good topic must be a single or two-word topic like 'animal', 'plants', 'cars', 'celebrities', etc.
Also, a good topic must be one from which other words that can be drawn can be found,
and it must be suitable for all audiences.
A bad topic is anything sexual or harassing; something vague like "hello" is also a bad topic.
Your answer should just be true or false.`,
});

if (checkTopicResponse.choices[0]?.text.includes("false")) {
return {
message: "The backend got an error: It's a bad topic!",
};
}

const response = await openai.completions.create({
model: "text-davinci-003",
prompt: `Suggest ${args.numberOfPlayer} words for a topic that is '${args.topic}'`,
Collaborator:

I think this prompt is not as specific as it needs to be. I just checked it in ChatGPT and the answer is not an array (as expected). You must be very specific, e.g.:

const prompt = `Give me a list of ${args.numberOfPlayer} words related
to the topic "${args.topic}" as a JSON array. Don't
include anything else in your response. The JSON array
must look like this: '{"words": ["word1", "word2", "word3"]}'.`;

This prompt is just an example and we're not sure that it works. Maybe you need to test it in ChatGPT and show some examples of what ChatGPT returned in this PR. In your code, you will need to check whether the response is in the expected format and retry if it isn't. Limit the retries to 10 or so to avoid making an unbounded number of calls to OpenAI.
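
A rough sketch of that retry-and-validate loop (hypothetical: the 10-try cap, the JSON shape, and the error message follow the suggestion above, not anything in this PR):

// Hypothetical sketch -- retry until the completion parses as '{"words": [...]}',
// capped at 10 attempts so we never loop on OpenAI forever.
const MAX_TRIES = 10;
let words: string[] | undefined;
for (let attempt = 0; attempt < MAX_TRIES && !words; attempt++) {
  const completion = await openai.completions.create({
    model: "text-davinci-003",
    prompt,
  });
  try {
    const parsed = JSON.parse(completion.choices[0]?.text ?? "");
    if (
      Array.isArray(parsed.words) &&
      parsed.words.every((w: unknown) => typeof w === "string")
    ) {
      words = parsed.words;
    }
  } catch {
    // Not valid JSON -- try again on the next iteration.
  }
}
if (!words) {
  return { message: "The backend got an error: OpenAI did not return a valid word list!" };
}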

Reply:

I can't test this API because I can't use it in China, so if you have a paid key, you can fix this problem.

Collaborator:

You can test using ChatGPT at https://chat.openai.com/, but I'll try to get a key.

});

// Pull the completion text out of the response
const words = response.choices[0]?.text;
// save the words and the topic into the database
if (words && words.length > 0) {
await ctx.runMutation(api.player.startGames, {
topic: args.topic,
words: words,
roomId: args.roomId,
});
return words;
} else {
console.error("The Open AI return nothing");
return {
message:
"The backend got an error: The maximum amount of users is 8!",
};
}
} catch (error: unknown) {
if (error instanceof OpenAI.APIError) {
console.error("Status", error.status); // e.g. 401
console.error("Message", error.message); // e.g. The authentication token you passed was invalid...
console.error("Error Code", error.code); // e.g. 'invalid_api_key'
console.error("Error Type", error.type); // e.g. 'invalid_request_error'
return { message: "The backend got an error", error: error };
} else {
console.error(error);
return { message: "The backend got an error", error: error };
}
}
},
});
55 changes: 55 additions & 0 deletions convex/player.ts
@@ -0,0 +1,55 @@
import { query, mutation } from "./_generated/server";
import { v } from "convex/values";

// start a game
export const startGames = mutation({
args: { topic: v.string(), words: v.string(), roomId: v.id("rooms") },
handler: async (ctx, args) => {
await ctx.db.patch(args.roomId, {
topic: args.topic,
words: args.words,
});
// update the players who are in this room
const playerList = await ctx.db
.query("player")
.filter((q) => q.eq(q.field("roomId"), args.roomId))
.collect();
for (let i = 0; i < playerList.length; i++) {
const player = playerList[i];
if (player) {
await ctx.db.patch(player._id, { roomId: args.roomId, score: 0 });
} else {
console.error("The data error");
}
}
},
});

// check whether the user's word is correct
export const checkWordFromUser = mutation({
args: {
playerId: v.id("player"),
wordFromUser: v.string(),
roomId: v.id("rooms"),
},
handler: async (ctx, args) => {
const room = await ctx.db.get(args.roomId);
// Update the score
if (room?.words?.includes(args.wordFromUser)) {
const player = await ctx.db.get(args.playerId);
const newScore = player?.score ? player?.score + 1 : 1;
await ctx.db.patch(args.playerId, { score: newScore });
Collaborator:

You have to check that the user has not already answered it. If you don't check this, I can spam any of the words and get unlimited score. Also check that the word is not my own word: I shouldn't be able to score by sending my own word.
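
A rough sketch of that guard (hypothetical: it assumes each player document gains a word field for their own assigned word and a guessedWords array of words they have already scored on; neither field exists in the current schema):

// Hypothetical sketch -- assumes `word` and `guessedWords` fields on player,
// which the current schema does not have.
const player = await ctx.db.get(args.playerId);
const alreadyGuessed = player?.guessedWords?.includes(args.wordFromUser) ?? false;
const isOwnWord = player?.word === args.wordFromUser;
if (alreadyGuessed || isOwnWord) {
  return; // no score for repeated guesses or for your own word
}
await ctx.db.patch(args.playerId, {
  score: (player?.score ?? 0) + 1,
  guessedWords: [...(player?.guessedWords ?? []), args.wordFromUser],
});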

Reply:

Yes, this can be changed to use your idea, thank you.

}
},
});

// get all the players in a room
export const getPlayersByRoomId = query({
args: { _id: v.id("rooms") },
handler: async (ctx, args) => {
return ctx.db
.query("player")
.filter((q) => q.eq(q.field("roomId"), args._id))
.collect();
},
});
50 changes: 50 additions & 0 deletions convex/rooms.ts
@@ -0,0 +1,50 @@
import { query, mutation } from "./_generated/server";
import { v } from "convex/values";

// create room
export const createRoom = mutation({
args: {
username: v.string(),
timeLimit: v.number(),
pairs: v.boolean(),
chatEnabled: v.boolean(),
},
handler: async (ctx, args) => {
const roomName = args.username + "'s Room";
const roomId = await ctx.db.insert("rooms", {
roomName,
timeLimit: args.timeLimit,
pairs: args.pairs,
chatEnabled: args.chatEnabled,
});
const playerId = await ctx.db.insert("player", {
username: args.username,
roomId: roomId,
});
return { playerId: playerId, roomId: roomId };
},
});

// let a player join a room
export const joinRoomByRoomId = mutation({
args: { username: v.string(), roomId: v.id("rooms") },
handler: async (ctx, args) => {
const players = await ctx.db
.query("player")
.filter((q) => q.eq(q.field("roomId"), args.roomId))
.collect();
if (players.length >= 8) {
return {
message: "The backend got an error: The maximum amount of users is 8!",
};
}
await ctx.db.insert("player", {
username: args.username,
roomId: args.roomId,
});
return ctx.db
.query("player")
.filter((q) => q.eq(q.field("roomId"), args.roomId))
.collect();
},
});
23 changes: 23 additions & 0 deletions convex/schema.ts
@@ -0,0 +1,23 @@
import { defineSchema, defineTable } from "convex/server";
import { v } from "convex/values";

export default defineSchema({
player: defineTable({
username: v.string(),
roomId: v.id("rooms"),
score: v.optional(v.number()),
}),
rooms: defineTable({
roomName: v.string(),
topic: v.optional(v.string()),
words: v.optional(v.string()),
chatEnabled: v.boolean(),
timeLimit: v.number(),
pairs: v.boolean(),
}),
messages: defineTable({
playerId: v.id("player"),
roomId: v.id("rooms"),
body: v.string(),
}),
});
1 change: 1 addition & 0 deletions package.json
@@ -18,6 +18,7 @@
"clsx": "^2.0.0",
"convex": "^1.2.1",
"next": "^13.4.2",
"openai": "^4.5.0",
"react": "18.2.0",
"react-dom": "18.2.0",
"zod": "^3.21.4"