[
  {
    "path": ".eslintrc.json",
    "content": "{\n  \"extends\": \"next/core-web-vitals\"\n}\n"
  },
  {
    "path": ".example.env.local",
    "content": "OPENAI_API_KEY=\nREPLICATE_TOKEN="
  },
  {
    "path": ".gitignore",
    "content": "# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.\n\n# dependencies\n/node_modules\n/.pnp\n.pnp.js\n\n# testing\n/coverage\n\n# next.js\n/.next/\n/out/\n\n# production\n/build\n\n# misc\n.DS_Store\n*.pem\n\n# debug\nnpm-debug.log*\nyarn-debug.log*\nyarn-error.log*\n\n# local env files\n.env*.local\n\n# vercel\n.vercel\n\n# typescript\n*.tsbuildinfo\nnext-env.d.ts\n"
  },
  {
    "path": "README.md",
    "content": "## God app\n\nThis is an experimental project using OpenAI GPT Plugins and Replicate to combine all AI APIs into one.\n\nInstead of using separate interfaces for image generation, video generation, audio generation, and general text natural language processing, this app combines all of them together into a single prompt.\n\nAPIs currently supported:\n- Text to image\n- Text to video\n- Text to audio\n- General natural language processing\n\nExamples of what you might ask:\n\n- Create a 4 day travel itinerary for Mexico City.\n- Create an image of someone in their production studio creating beats, futuristic, dim lighting, bronx new york\n- Create a track that sounds like it might come from kanye west, hip-hop, soul samples, heavy drums, innovative melodies, experimental sounds, unusual effects, automation, modulating filters, distortion effects\n- Create a video of clown fish swimming in a coral reef, beautiful, 8k, perfect, award winning, national geographic\n\n## Prerequisites\n\nTo run this app, you must have the following:\n\n1. OpenAI API Key\n2. Replicate token\n\n## Running the app\n\nTo run this app, follow these steps:\n\n1. Clone the repo\n\n```sh\ngit clone git@github.com:dabit3/openai-functions-god-app.git\n```\n\n2. Change into the directory and install the dependencies:\n\n```sh\ncd openai-functions-god-app\nnpm install\n```\n\n3. Set environment variables in a file named `.env.local` (you can copy `.example.env.local`)\n\n```\nOPENAI_API_KEY=\nREPLICATE_TOKEN=\n```\n\n4. Run the app\n\n```sh\nnpm run dev\n```"
  },
  {
    "path": "api.ts",
    "content": ""
  },
  {
    "path": "app/api/gpt/route.ts",
    "content": "import { NextRequest, NextResponse } from 'next/server'\nimport Replicate from 'replicate'\n\nconst replicate = new Replicate({\n  auth: process.env.REPLICATE_TOKEN || ''\n})\n\nconst KEY = process.env.OPENAI_API_KEY\nconst base_uri = 'https://api.openai.com/v1/chat/completions'\n\nconst headers = {\n  'Content-Type': 'application/json',\n  'Authorization': `Bearer ${KEY}`\n}\n\nconst data = {\n  'model': 'gpt-4'\n}\n\nexport async function POST(req: NextRequest, res: NextResponse) {\n  try {\n    const { query } = await req.json()\n\n    const requestData = {\n      ...data,\n      'messages': [\n        {'role': 'user', 'content': query }\n      ],\n      functions: [\n        {\n          name: 'createVideo',\n          description: 'generate a video using replicate, an AI LLM',\n          parameters: {\n            type: 'object',\n            properties: {\n              prompt: {\n                type: 'string',\n                description: 'the main prompt that should be passed in to the LLM'\n              },\n              guidance_scale: {\n                type: 'integer',\n                description: 'This is the requested guidance scale for the video in a numeric value. Default to 17.5 if none is defined in the prompt.'\n              },\n              num_frames: {\n                type: 'integer',\n                description: 'The number of frames if defined in the prompt'\n              },\n              height: {\n                type: 'integer',\n                description: 'The height of the video if defined in the prompt. Not affected by resolution.'\n              },\n              width: {\n                type: 'integer',\n                description: 'The width of the video if defined in the prompt. Not affected by resolution.'\n              }\n            },\n            'required': ['prompt', 'guidance_scale'],\n          }\n        },\n        {\n          name: 'createMusic',\n          description: 'generate music using replicate',\n          parameters: {\n            type: 'object',\n            properties: {\n              prompt: {\n                type: 'string',\n                description: 'the exact prompt passed in'\n              }\n            }\n          }\n        },\n        {\n          name: 'createImage',\n          description: 'generates an image using replicate',\n          parameters: {\n            type: 'object',\n            properties: {\n              prompt: {\n                type: 'string',\n                description: 'the exact prompt passed in'\n              }\n            }\n          }\n        }\n      ],\n      function_call: 'auto'\n    }\n\n    const response = await fetch(base_uri, {\n      method: 'POST',\n      headers,\n      body: JSON.stringify(requestData)\n    })\n\n    const json = await response.json()\n    let choice = json.choices[0]\n    \n    const { function_call } = choice.message\n    console.log('function_call: ', function_call)\n    if (function_call) {\n      const args = JSON.parse(function_call.arguments)\n      if (function_call.name === 'createVideo') {\n        const output = await replicate.run(\n          'anotherjesse/zeroscope-v2-xl:71996d331e8ede8ef7bd76eba9fae076d31792e4ddf4ad057779b443d6aea62f',\n          {\n            input: {\n              ...args,\n            }\n          }\n        );\n        return NextResponse.json({\n          data: output,\n          type: 'video'\n        });\n      }\n      if (function_call.name === 'createMusic') {\n        const output = await replicate.run(\n          'joehoover/musicgen:7a76a8258b23fae65c5a22debb8841d1d7e816b75c2f24218cd2bd8573787906',\n          {\n            input: {\n              model_version: 'melody',\n              ...args\n            }\n          }\n        )\n        return NextResponse.json({\n          data: output,\n          type: 'audio'\n        });\n      }\n      if (function_call.name === 'createImage') {\n        const output = await replicate.run(\n          'ai-forever/kandinsky-2:601eea49d49003e6ea75a11527209c4f510a93e2112c969d548fbb45b9c4f19f',\n          {\n            input: {\n              ...args\n            }\n          }\n        )\n        return NextResponse.json({\n          data: output,\n          type: 'image'\n        });\n      }\n    }\n\n    else {\n      console.log('choice: ', choice)\n      return NextResponse.json({\n        data: choice.message.content,\n        type: 'text',\n      });\n    }\n  } catch (err) {\n    console.log('error: ', err)\n    return NextResponse.json({ error: err });\n  }\n}"
  },
  {
    "path": "app/globals.css",
    "content": "@tailwind base;\n@tailwind components;\n@tailwind utilities;\n\n:root {\n  --foreground-rgb: 0, 0, 0;\n  --background-start-rgb: 214, 219, 220;\n  --background-end-rgb: 255, 255, 255;\n}\n\n@media (prefers-color-scheme: dark) {\n  :root {\n    --foreground-rgb: 255, 255, 255;\n    --background-start-rgb: 0, 0, 0;\n    --background-end-rgb: 0, 0, 0;\n  }\n}\n\nbody {\n  color: rgb(var(--foreground-rgb));\n  background: linear-gradient(\n      to bottom,\n      transparent,\n      rgb(var(--background-end-rgb))\n    )\n    rgb(var(--background-start-rgb));\n}\n"
  },
  {
    "path": "app/layout.tsx",
    "content": "import './globals.css'\nimport { Inter } from 'next/font/google'\n\nconst inter = Inter({ subsets: ['latin'] })\n\nexport const metadata = {\n  title: 'Create Next App',\n  description: 'Generated by create next app',\n}\n\nexport default function RootLayout({\n  children,\n}: {\n  children: React.ReactNode\n}) {\n  return (\n    <html lang=\"en\">\n      <body className={inter.className}>{children}</body>\n    </html>\n  )\n}\n"
  },
  {
    "path": "app/page.tsx",
    "content": "'use client'\nimport { useState } from 'react'\n\nexport default function Home() {\n  const [input, setInput] = useState('')\n  const [image, setImage] = useState('')\n  const [audio, setAudio] = useState('')\n  const [video, setVideo] = useState('')\n  const [text, setText] = useState('')\n  async function callApi() {\n    try {\n      if (!input) return\n      setImage('')\n      setAudio('')\n      setVideo('')\n      setText('')\n      const response = await fetch('/api/gpt', {\n        method: \"POST\",\n        body: JSON.stringify({\n          query: input\n        })\n      })\n      const { data, type } = await response.json()\n      console.log('data:', data)\n      if (type === 'image') {\n        setImage(data[0])\n      }\n      if (type === 'video') {\n        setVideo(data[0])\n      }\n      if (type === 'audio') {\n        setAudio(data)\n      }\n      if (type == 'text') {\n        setText(data)\n      }\n    } catch (err) {\n      console.log('error;', err)\n    }\n  }\n\n  return (\n    <main className=\"flex flex-col items-center justify-between p-24\">\n      <input\n        className=\"text-black px-3 py-1 rounded\"\n        onChange={e => setInput(e.target.value)}\n      />\n      <button\n        onClick={callApi}\n        className=\"rounded-full bg-green-500 text-white py-3 px-14 mt-3 mb-4 cursor-pointer\"\n      >IMAGINE</button>\n      {\n        image && <img src={image} width=\"500px\" />\n      }\n      {\n        video && <video src={video} controls></video>\n      }\n      {\n        text && <p>{text}</p>\n      }\n      {\n        audio && (\n          <audio controls>\n            <source src={audio} type=\"audio/wav\"></source>\n          </audio>\n        )\n      }\n    </main>\n  )\n}\n"
  },
  {
    "path": "next.config.js",
    "content": "/** @type {import('next').NextConfig} */\nconst nextConfig = {}\n\nmodule.exports = nextConfig\n"
  },
  {
    "path": "package.json",
    "content": "{\n  \"name\": \"gpt-functions\",\n  \"version\": \"0.1.0\",\n  \"private\": true,\n  \"scripts\": {\n    \"dev\": \"next dev\",\n    \"build\": \"next build\",\n    \"start\": \"next start\",\n    \"lint\": \"next lint\"\n  },\n  \"dependencies\": {\n    \"@types/node\": \"20.3.2\",\n    \"@types/react\": \"18.2.14\",\n    \"@types/react-dom\": \"18.2.6\",\n    \"autoprefixer\": \"10.4.14\",\n    \"eslint\": \"8.43.0\",\n    \"eslint-config-next\": \"13.4.7\",\n    \"next\": \"13.4.7\",\n    \"postcss\": \"8.4.24\",\n    \"react\": \"18.2.0\",\n    \"react-dom\": \"18.2.0\",\n    \"replicate\": \"^0.12.3\",\n    \"tailwindcss\": \"3.3.2\",\n    \"typescript\": \"5.1.6\"\n  }\n}\n"
  },
  {
    "path": "postcss.config.js",
    "content": "module.exports = {\n  plugins: {\n    tailwindcss: {},\n    autoprefixer: {},\n  },\n}\n"
  },
  {
    "path": "tailwind.config.js",
    "content": "/** @type {import('tailwindcss').Config} */\nmodule.exports = {\n  content: [\n    './pages/**/*.{js,ts,jsx,tsx,mdx}',\n    './components/**/*.{js,ts,jsx,tsx,mdx}',\n    './app/**/*.{js,ts,jsx,tsx,mdx}',\n  ],\n  theme: {\n    extend: {\n      backgroundImage: {\n        'gradient-radial': 'radial-gradient(var(--tw-gradient-stops))',\n        'gradient-conic':\n          'conic-gradient(from 180deg at 50% 50%, var(--tw-gradient-stops))',\n      },\n    },\n  },\n  plugins: [],\n}\n"
  },
  {
    "path": "tsconfig.json",
    "content": "{\n  \"compilerOptions\": {\n    \"target\": \"es5\",\n    \"lib\": [\"dom\", \"dom.iterable\", \"esnext\"],\n    \"allowJs\": true,\n    \"skipLibCheck\": true,\n    \"strict\": true,\n    \"forceConsistentCasingInFileNames\": true,\n    \"noEmit\": true,\n    \"esModuleInterop\": true,\n    \"module\": \"esnext\",\n    \"moduleResolution\": \"node\",\n    \"resolveJsonModule\": true,\n    \"isolatedModules\": true,\n    \"jsx\": \"preserve\",\n    \"incremental\": true,\n    \"noImplicitAny\": false,\n    \"plugins\": [\n      {\n        \"name\": \"next\"\n      }\n    ],\n    \"paths\": {\n      \"@/*\": [\"./*\"]\n    }\n  },\n  \"include\": [\"next-env.d.ts\", \"**/*.ts\", \"**/*.tsx\", \".next/types/**/*.ts\"],\n  \"exclude\": [\"node_modules\"]\n}\n"
  }
]