From 7b50cefdbd7e676b336a3fe4f449a7c1975be3ce Mon Sep 17 00:00:00 2001
From: Noah Solomon
Date: Thu, 21 Nov 2024 23:06:56 -0500
Subject: [PATCH] coin composition components

---
 generate/src/CoinChart.tsx              |   4 +-
 generate/src/Composition.tsx            | 373 ------------------------
 generate/src/composition-coin-chart.tsx | 102 +------
 generate/src/composition-coin.tsx       |  16 +-
 4 files changed, 32 insertions(+), 463 deletions(-)
 delete mode 100644 generate/src/Composition.tsx

diff --git a/generate/src/CoinChart.tsx b/generate/src/CoinChart.tsx
index eaa3a0d..a4a7f75 100644
--- a/generate/src/CoinChart.tsx
+++ b/generate/src/CoinChart.tsx
@@ -1,5 +1,4 @@
 import { Composition, staticFile } from 'remotion';
-import { AudiogramComposition, AudioGramSchema } from './Composition';
 import './style.css';
 import {
   initialAgentName,
@@ -9,6 +8,7 @@ import {
 } from './tmp/context';
 import { getAudioDuration } from '@remotion/media-utils';
 import { CoinChartComposition } from './composition-coin-chart';
+import { z } from 'zod';
 
 const PROCESS_ID = 0;
 
@@ -20,7 +20,7 @@ export default function CoinChart() {
         fps={fps}
         width={1080}
         height={1920}
-        schema={AudioGramSchema}
+        schema={z.object({})}
         defaultProps={{
           // Audio settings
           audioOffsetInSeconds: 0,
diff --git a/generate/src/Composition.tsx b/generate/src/Composition.tsx
deleted file mode 100644
index 81fde13..0000000
--- a/generate/src/Composition.tsx
+++ /dev/null
@@ -1,373 +0,0 @@
-import { useAudioData, visualizeAudio } from '@remotion/media-utils';
-import React, { useEffect, useRef, useState } from 'react';
-import {
-  AbsoluteFill,
-  Audio,
-  continueRender,
-  Img,
-  OffthreadVideo,
-  Sequence,
-  staticFile,
-  useCurrentFrame,
-  useVideoConfig,
-} from 'remotion';
-import { fps, music } from './tmp/context';
-import { PaginatedSubtitles } from './Subtitles';
-import { z } from 'zod';
-import { zColor } from '@remotion/zod-types';
-
-type SubtitleEntry = {
-  index: string;
-  startTime: number;
-  endTime: number;
-  text: string;
-  srt: string;
-  srtFileIndex: number;
-};
-
-const AgentDetailsSchema = z.record(
-  z.object({
-    color: zColor(),
-    image: z.string().refine((s) => s.endsWith('.png'), {
-      message: 'Agent image must be a .png file',
-    }),
-  })
-);
-
-const srtTimeToSeconds = (srtTime: string) => {
-  const [hours, minutes, secondsAndMillis] = srtTime.split(':');
-  const [seconds, milliseconds] = secondsAndMillis.split(',');
-  return (
-    Number(hours) * 3600 +
-    Number(minutes) * 60 +
-    Number(seconds) +
-    Number(milliseconds) / 1000
-  );
-};
-
-const parseSRT = (
-  srtContent: string,
-  srtFileIndex: number
-): SubtitleEntry[] => {
-  // Split content into subtitle blocks
-  const blocks = srtContent.split('\n\n');
-
-  // Extract timestamps and text from each block
-  return blocks
-    .map((block) => {
-      const lines = block.split('\n');
-      const indexLine = lines[0];
-      const timeLine = lines[1];
-
-      if (!indexLine || !timeLine || lines.length < 3) {
-        return null;
-      }
-
-      const [startTime, endTime] = timeLine
-        .split(' --> ')
-        .map(srtTimeToSeconds);
-
-      // Combine all text lines into one text block
-      const textLines = lines.slice(2).join(' ');
-
-      return {
-        index: indexLine,
-        startTime,
-        endTime,
-        text: textLines,
-        srt: block, // Include only this block of text
-        srtFileIndex, // Add the index of the SRT file
-      };
-    })
-    .filter((entry): entry is SubtitleEntry => entry !== null);
-};
-
-const SubtitleFileSchema = z.object({
-  name: z.string(),
-  file: z.string().refine((s) => s.endsWith('.srt'), {
-    message: 'Subtitle file must be a .srt file',
-  }),
-  asset: z.string(),
-});
-
-export const AudioGramSchema = z.object({
-  initialAgentName: z.string(),
-  agentDetails: AgentDetailsSchema,
-  videoFileName: z.string(),
-  durationInSeconds: z.number().positive(),
-  audioOffsetInSeconds: z.number().min(0),
-  subtitlesFileName: z.array(SubtitleFileSchema),
-  audioFileName: z.string().refine((s) => s.endsWith('.mp3'), {
-    message: 'Audio file must be a .mp3 file',
-  }),
-  titleText: z.string(),
-  titleColor: zColor(),
-  subtitlesTextColor: zColor(),
-  subtitlesLinePerPage: z.number().int().min(0),
-  subtitlesLineHeight: z.number().int().min(0),
-  subtitlesZoomMeasurerSize: z.number().int().min(0),
-  mirrorWave: z.boolean(),
-  waveLinesToDisplay: z.number().int().min(0),
-  waveFreqRangeStartIndex: z.number().int().min(0),
-  waveNumberOfSamples: z.enum(['32', '64', '128', '256', '512']),
-});
-
-type AudiogramCompositionSchemaType = z.infer<typeof AudioGramSchema>;
-
-const AudioViz: React.FC<{
-  numberOfSamples: number;
-  freqRangeStartIndex: number;
-  waveColor: string;
-  waveLinesToDisplay: number;
-  mirrorWave: boolean;
-  audioSrc: string;
-}> = ({
-  numberOfSamples,
-  waveColor,
-  freqRangeStartIndex,
-  waveLinesToDisplay,
-  mirrorWave,
-  audioSrc,
-}) => {
-  const frame = useCurrentFrame();
-
-  const audioData = useAudioData(audioSrc);
-
-  if (!audioData) {
-    return null;
-  }
-
-  const frequencyData = visualizeAudio({
-    fps,
-    frame,
-    audioData,
-    numberOfSamples, // Use more samples to get a nicer visualisation
-  });
-
-  // Pick the low values because they look nicer than high values
-  // feel free to play around :)
-  const frequencyDataSubset = frequencyData.slice(
-    freqRangeStartIndex,
-    freqRangeStartIndex +
-      (mirrorWave ? Math.round(waveLinesToDisplay / 2) : waveLinesToDisplay)
-  );
-
-  const frequencesToDisplay = mirrorWave
-    ? [...frequencyDataSubset.slice(1).reverse(), ...frequencyDataSubset]
-    : frequencyDataSubset;
-
-  return (
    [wrapper element opening this return block not recovered from the extracted patch]
-      {frequencesToDisplay.map((v, i) => {
-        return (
    [per-bar element markup not recovered from the extracted patch]
-        );
-      })}
    [wrapper closing tag not recovered from the extracted patch]
-  );
-};
-
-export const AudiogramComposition: React.FC<AudiogramCompositionSchemaType> = ({
-  subtitlesFileName,
-  agentDetails,
-  audioFileName,
-  subtitlesLinePerPage,
-  initialAgentName,
-  waveNumberOfSamples,
-  waveFreqRangeStartIndex,
-  waveLinesToDisplay,
-  subtitlesZoomMeasurerSize,
-  subtitlesLineHeight,
-  mirrorWave,
-  audioOffsetInSeconds,
-  videoFileName,
-}) => {
-  const [currentAgentName, setCurrentAgentName] = useState('');
-  const { durationInFrames, fps } = useVideoConfig();
-  const frame = useCurrentFrame();
-  const [subtitlesData, setSubtitlesData] = useState<SubtitleEntry[]>([]);
-  const [currentSubtitle, setCurrentSubtitle] = useState<SubtitleEntry | null>(
-    null
-  );
-  const [handle] = useState<number | null>(null);
-  const [prevImageIdx, setPrevImageIdx] = useState(0);
-  const ref = useRef<HTMLDivElement>(null);
-  const [currentSrtContent, setCurrentSrtContent] = useState('');
-
-  // Determine the current subtitle and agent based on the frame
-  useEffect(() => {
-    if (subtitlesData.length > 0) {
-      const currentTime = frame / fps;
-      const currentSubtitle = subtitlesData.find(
-        (subtitle) =>
-          currentTime >= subtitle.startTime && currentTime < subtitle.endTime
-      );
-
-      if (currentSubtitle) {
-        setPrevImageIdx(currentSubtitle.srtFileIndex);
-        setCurrentSubtitle(currentSubtitle);
-        // Use the srtFileIndex to find the corresponding agent name
-        const agentInfo = subtitlesFileName[currentSubtitle.srtFileIndex];
-        setCurrentAgentName(agentInfo.name);
-      }
-    }
-  }, [frame, fps, subtitlesData, subtitlesFileName]);
-
-  // Fetch and parse all SRT files
-  useEffect(() => {
-    const fetchSubtitlesData = async () => {
-      try {
-        const data = await Promise.all(
-          subtitlesFileName.map(async ({ file }, index) => {
-            // Pass the index to parseSRT
-            const response = await fetch(file);
-            const text = await response.text();
-            return parseSRT(text, index);
-          })
-        );
-        setSubtitlesData(data.flat().sort((a, b) => a.startTime - b.startTime));
-      } catch (error) {
-        console.error('Error fetching subtitles:', error);
-      }
-    };
-
-    fetchSubtitlesData();
-  }, [subtitlesFileName]);
-
-  // Determine the current subtitle based on the frame
-  useEffect(() => {
-    if (subtitlesData.length > 0) {
-      const currentTime = frame / fps;
-      const current = subtitlesData.find(
-        (subtitle) =>
-          currentTime >= subtitle.startTime && currentTime < subtitle.endTime
-      );
-      setCurrentSubtitle(current || null);
-    }
-  }, [frame, fps, subtitlesData]);
-
-  // Ensure that the delayRender handle is cleared when the component unmounts
-  useEffect(() => {
-    return () => {
-      if (handle !== null) {
-        continueRender(handle);
-      }
-    };
-  }, [handle]);
-
-  useEffect(() => {
-    if (currentSubtitle) {
-      setCurrentSrtContent(currentSubtitle.srt);
-    }
-  }, [currentSubtitle]);
-
-  const audioOffsetInFrames = Math.round(audioOffsetInSeconds * fps);
-
-  return (
    [render markup not recovered from the extracted patch: a block of deleted
     JSX lines built from the components imported above (AbsoluteFill, Sequence,
     Audio, OffthreadVideo, Img, PaginatedSubtitles), including a
     {/*@ts-ignore */} comment]
-  );
-};
diff --git a/generate/src/composition-coin-chart.tsx b/generate/src/composition-coin-chart.tsx
index c7c14cb..8d3c45b 100644
--- a/generate/src/composition-coin-chart.tsx
+++ b/generate/src/composition-coin-chart.tsx
@@ -1,92 +1,22 @@
-'use client';
-
-import React, { useRef } from 'react';
-import {
-  AbsoluteFill,
-  Sequence,
-  useCurrentFrame,
-  useVideoConfig,
-} from 'remotion';
-import {
-  Line,
-  LineChart,
-  XAxis,
-  YAxis,
-  Tooltip,
-  ResponsiveContainer,
-} from 'recharts';
-import { Card, CardContent } from '@/components/ui/card';
-import {
-  ChartContainer,
-  ChartTooltip,
-  ChartTooltipContent,
-} from '@/components/ui/chart';
-
-// Mock data generator
-const generateMockData = (frame: number) => {
-  const data = [];
-  for (let i = 0; i < 30; i++) {
-    data.push({
-      date: new Date(2023, 0, i + 1).toISOString().split('T')[0],
-      price:
-        Math.sin(i * 0.2 + frame * 0.1) * 1000 + 5000 + Math.random() * 200,
-    });
-  }
-  return data;
-};
+import React from 'react';
+import { AbsoluteFill, Sequence } from 'remotion';
+import TradingviewWidget from './tradingview-widget';
 
 export const CoinChartComposition: React.FC = () => {
-  const frame = useCurrentFrame();
-  const { durationInFrames, fps } = useVideoConfig();
-  const ref = useRef(null);
-
-  const data = generateMockData(frame);
-  return (
    [deleted chart markup not recovered from the extracted patch: the old
     component rendered the mock data in a recharts LineChart wrapped in
     Card/CardContent, ChartContainer and ResponsiveContainer; the surviving
     fragments are an XAxis tickFormatter of
     `new Date(value).toLocaleDateString()` and a ChartTooltip with
     ChartTooltipContent]
    [added render markup not recovered from the extracted patch: the new
     component lays out a Sequence inside AbsoluteFill and presumably renders
     the imported TradingviewWidget in the bottom half; only the inline
     comments below survive, interleaved with a few more deleted lines of the
     old chart markup]
+      {/* Price History Chart */}
+        {/* Top half - empty */}
+        {/* Bottom half - charts */}
); }; diff --git a/generate/src/composition-coin.tsx b/generate/src/composition-coin.tsx index 87040c7..00d4e6e 100644 --- a/generate/src/composition-coin.tsx +++ b/generate/src/composition-coin.tsx @@ -16,6 +16,7 @@ import { fps, music } from './tmp/context'; import { PaginatedSubtitles } from './Subtitles'; import { z } from 'zod'; import { zColor } from '@remotion/zod-types'; +import TradingviewWidget, { Timeframe } from './tradingview-widget'; type SubtitleEntry = { index: string; @@ -325,11 +326,22 @@ export const AudiogramComposition: React.FC = ({
    [hunk body only partially recovered from the extracted patch: the unchanged
     JSX context lines lost their element tags; the visible changes are one
     deleted element line, an added `/> */}` closing what appears to be an
     existing element newly wrapped in a JSX comment, and the TradingviewWidget
     block added below]
+          <TradingviewWidget
+            timeframe={(() => {
+              const quarter = durationInFrames / 4;
+              if (frame < quarter) return '1D';
+              if (frame < quarter * 2) return '1M';
+              if (frame < quarter * 3) return '6M';
+              return '1Y';
+            })()}
+          />
+
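
Note: the './tradingview-widget' module imported by both new compositions is
not included in this patch. The sketch below is only a guess at the interface
they rely on; the Timeframe values and the quarter-based switching come
straight from the hunks above, while the prop surface and the placeholder
rendering are assumptions rather than the actual implementation.

import React from 'react';

// Timeframe values used by the inline selector added in composition-coin.tsx.
export type Timeframe = '1D' | '1M' | '6M' | '1Y';

// Hypothetical prop surface inferred from `<TradingviewWidget timeframe={...} />`.
export interface TradingviewWidgetProps {
  timeframe: Timeframe;
}

// Placeholder body only: the real component presumably embeds a TradingView
// chart configured for the given timeframe.
const TradingviewWidget: React.FC<TradingviewWidgetProps> = ({ timeframe }) => {
  return <div data-timeframe={timeframe}>TradingView chart ({timeframe})</div>;
};

export default TradingviewWidget;

// The timeframe selection splits the composition into four equal quarters;
// pulled out of the JSX it reads:
export const timeframeForFrame = (
  frame: number,
  durationInFrames: number
): Timeframe => {
  const quarter = durationInFrames / 4;
  if (frame < quarter) return '1D';
  if (frame < quarter * 2) return '1M';
  if (frame < quarter * 3) return '6M';
  return '1Y';
};

With fps = 30 and a four-minute composition, for example, the selector yields
'1D' for the first minute and then '1M', '6M' and '1Y' for each following
minute of the render.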