
Better Python Standard Library Autocompletion for Notepad++

Posted: Thursday, 7 July 2016

Download: http://static.extramaster.net/python.xml.zip (476 KB download, 3,367 KB extracted)
Note that there are 13,563 definitions included, so any application that attempts to parse the XML structure will have a bad time.

Before:


After:




The "python.xml" file was generated with the Notepad++ "python.xml" generator at https://sourceforge.net/projects/npp-python/, in conjunction with a script that imports all Python modules (see http://stackoverflow.com/questions/1206832/importing-the-entire-python-standard-library), run on a computer with a fresh Python install. The result is a "python.xml" file containing definitions and autocomplete entries for all Python Standard Library modules.
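The linked Stack Overflow question covers the "import everything" part. A rough sketch of that idea (not the exact script used here) looks something like this - walk every module the interpreter can see and try to import it, skipping anything that refuses to load:

import pkgutil
import importlib

imported = []
for module_info in pkgutil.iter_modules():
    name = module_info[1]  # (importer, name, ispkg)
    try:
        importlib.import_module(name)  # some imports have side effects (hello, antigravity)
        imported.append(name)
    except Exception:
        pass  # platform-specific or broken modules simply get skipped

print("Imported %d modules" % len(imported))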


Download: http://static.extramaster.net/python.xml.zip
Or, if GitHub Gists are your thing: https://gist.github.com/extramaster/9d0b7eba99a9708eb3f71ae95bcd42b5

The "python.xml" file should be placed in "C:\Program Files (x86)\Notepad++\plugins\APIs" or "C:\Program Files\Notepad++\plugins\APIs", overwriting the pre-installed copy.

In case you want to revert to the pre-installed "python.xml" file, here's a copy: http://static.extramaster.net/python_original.xml.zip

Here's a list of all of the Python Standard Library modules imported when generating the autocomplete file:
import _bsddb
import _ctypes
import _ctypes_test
import _elementtree
import _hashlib
import _msi
import _multiprocessing
import _socket
import _sqlite3
import _ssl
import _testcapi
import _tkinter
import bz2
import pyexpat
import select
import unicodedata
import winsound
import BaseHTTPServer
import Bastion
import CGIHTTPServer
import ConfigParser
import Cookie
import DocXMLRPCServer
import HTMLParser
import MimeWriter
import Queue
import SimpleHTTPServer
import SimpleXMLRPCServer
import SocketServer
import StringIO
import UserDict
import UserList
import UserString
import __future__
import _abcoll
import _osx_support
import _pyio
import _strptime
import _threading_local
import _weakrefset
import abc
import aifc
import antigravity
import anydbm
import argparse
import ast
import asynchat
import asyncore
import atexit
import audiodev
import base64
import bdb
import binhex
import bisect
import bsddb
import cProfile
import calendar
import cgi
import cgitb
import chunk
import cmd
import code
import codecs
import codeop
import collections
import colorsys
import commands
import compileall
import compiler
import contextlib
import cookielib
import copy
import copy_reg
import csv
import ctypes
import dbhash
import decimal
import difflib
import dircache
import dis
import distutils
import doctest
import dumbdbm
import dummy_thread
import dummy_threading
import email
import encodings
import filecmp
import fileinput
import fnmatch
import formatter
import fpformat
import fractions
import ftplib
import functools
import genericpath
import getopt
import getpass
import gettext
import glob
import gzip
import hashlib
import heapq
import hmac
import hotshot
import htmlentitydefs
import htmllib
import httplib
import idlelib
import ihooks
import imaplib
import imghdr
import importlib
import imputil
import inspect
import io
import json
import keyword
import lib2to3
import linecache
import locale
import logging
import macpath
import macurl2path
import mailbox
import mailcap
import markupbase
import md5
import mhlib
import mimetools
import mimetypes
import mimify
import modulefinder
import msilib
import multifile
import multiprocessing
import mutex
import netrc
import new
import nntplib
import ntpath
import nturl2path
import numbers
import opcode
import optparse
import os
import os2emxpath
import pdb
import pickle
import pickletools
import pipes
import pkgutil
import platform
import plistlib
import popen2
import poplib
import posixfile
import posixpath
import pprint
import profile
import pstats
import py_compile
import pyclbr
import pydoc
import pydoc_data
import quopri
import random
import re
import repr
import rexec
import rfc822
import rlcompleter
import robotparser
import runpy
import sched
import sets
import sgmllib
import sha
import shelve
import shlex
import shutil
import site
import smtpd
import smtplib
import sndhdr
import socket
import sqlite3
import sre
import sre_compile
import sre_constants
import sre_parse
import ssl
import stat
import statvfs
import string
import stringold
import stringprep
import struct
import subprocess
import sunau
import sunaudio
import symbol
import symtable
import sysconfig
import tabnanny
import tarfile
import telnetlib
import tempfile
import test
import textwrap
import this
import threading
import timeit
import toaiff
import token
import tokenize
import trace
import traceback
import types
import unittest
import urllib
import urllib2
import urlparse
import user
import uu
import uuid
import warnings
import wave
import weakref
import webbrowser
import whichdb
import wsgiref
import xdrlib
import xml
import xmllib
import xmlrpclib
import zipfile
import Canvas
import Dialog
import FileDialog
import FixTk
import ScrolledText
import SimpleDialog
import Tix
import Tkconstants
import Tkdnd
import Tkinter
import tkColorChooser
import tkCommonDialog
import tkFileDialog
import tkFont
import tkMessageBox
import tkSimpleDialog
import ttk
import turtle

Playing MIDI tracks from an Arduino

Posted: Thursday, 29 October 2015

Link: https://www.extramaster.net/tools/midiToArduino/


So, you just learnt how to control a buzzer using an Arduino, but you want more than just simple beeps and hums - you want to assert your dominance over the sound waves and show off how well you can control the buzzer.

Cynical? Well, there's no denying that it's a sentiment we all share once we learn something new, but hey, if you've managed to acquire an Arduino and get it working, that's already quite an accomplishment.

A quick search for "music buzzer arduino" should point you in the right direction, but no-one has the time to hand-code and map MIDI notes to the frequency values that the Arduino's tone() function requires, especially if you're working with a long MIDI track.
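For reference, the arithmetic behind that mapping is straightforward - MIDI note 69 is A4 at 440 Hz, and each semitone is a factor of 2^(1/12). Here's a rough sketch (not the tool's actual code) of the conversion:

def midi_note_to_frequency(note):
    # MIDI note 69 = A4 = 440 Hz; each semitone multiplies the frequency by 2^(1/12)
    return 440.0 * 2 ** ((note - 69) / 12.0)

print(int(round(midi_note_to_frequency(60))))  # middle C -> ~262 Hz
print(int(round(midi_note_to_frequency(69))))  # A4 -> 440 Hz

Doing that by hand for every note of a long track is exactly the tedium the tool below takes care of.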

The solution?

https://www.extramaster.net/tools/midiToArduino/

Circuitry


Wiring the buzzer is trivial: place the buzzer across two rows of a breadboard, connect one row to pin 11 on your Arduino, and the other row to ground.

Demo

https://www.youtube.com/watch?v=B1oHQzp1P3w
https://www.youtube.com/watch?v=4FDFpycApP4

Note that this also works with Raspberry Pis.
https://www.youtube.com/watch?v=HOisQF-JaS0
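If you're going the Raspberry Pi route instead, software PWM will do the job. Here's a minimal sketch assuming the RPi.GPIO library, a buzzer wired between BCM pin 18 and ground, and a hard-coded note list (the notes are just an illustration, not output from the tool):

import time
import RPi.GPIO as GPIO

BUZZER_PIN = 18
notes = [(440, 0.3), (494, 0.3), (523, 0.6)]  # (frequency in Hz, duration in seconds)

GPIO.setmode(GPIO.BCM)
GPIO.setup(BUZZER_PIN, GPIO.OUT)
pwm = GPIO.PWM(BUZZER_PIN, notes[0][0])  # start at the first note's frequency
pwm.start(50)                            # 50% duty cycle gives a steady tone

try:
    for frequency, duration in notes:
        pwm.ChangeFrequency(frequency)
        time.sleep(duration)
finally:
    pwm.stop()
    GPIO.cleanup()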


Steps/Instructions

  1. Visit: https://www.extramaster.net/tools/midiToArduino/
  2. Upload a MIDI file
  3. Choose the track that you want to export
  4. Copy the resulting code into a new Arduino sketch

Sample Exported Arduino Code


Didn't get the MIDI file to work with the web app? No worries, here's some sample code generated by the web app:

Midi: Fleuron-128 - Heaven - Song: Shaun Frank & KSHMR - Heaven (feat. Delaney Jane)

Midi: Antergy - DB15 - Song: Atmozfears - DB15

Midi: Kevin Fishburne - Terra's Theme - Song: Final Fantasy VI - Terra's Theme

https://www.extramaster.net/tools/midiToArduino/

Final Notes

Oh, and by the way, it appears that you can leave the piezo buzzer running on a loop for an extended amount of time (24+ hours), so if you want to play a MIDI track indefinitely, you can use an Arduino to do so.

Unfortunately, you can only have one tone running at a time, so if your MIDI track has multiple keys being played simultaneously, expect some wacky results. (Here, try this: http://www.forelise.com/midi - "Track 2: Acoustic Grand Piano - Piano - Für Elise")
If you attempt some protothreads hack, then expect only one piezo buzzer to work at a time. But if you're game and want to give protothreads a try:





https://www.extramaster.net/tools/midiToArduino/

Python: Creating a timed image slideshow with PIL and OpenCV v2

Posted: Friday, 17 July 2015

Problem

Apparently, one of the hardest video-editing tasks to do with a script is creating a dynamically-timed slideshow without any fancy drag-and-drop GUIs.


With Adobe After Effects, you cannot dynamically load external images using an expression (they will need to be loaded into your project beforehand, and even then, you cannot load the image into a comp with an expression).

And adding hundreds of layers of images and having to go through each and every one of them to edit the expression is a fairly tedious task.

Worst of all, every change that you make - such as adding a new image to the slideshow - adds to the chore of doing things manually.


Solution

Building upon my last two posts, Python: Converting from PIL to OpenCV 2 Image Formats and Python: PIL to mp4, we've reached the end of the series.

In "Python: PIL to mp4", a simple blending transition was created using PIL and OpenCV. The objective of this post is to introduce timings, so the animation can be delayed across numerous images.

We can extend this primitive transition so that an image is held on screen until a certain amount of time has elapsed, with the transition to the next image kicking in after "x" seconds - hence forming a slideshow.
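To make that timing idea concrete before diving into the full script, here's a tiny self-contained sketch of the blend factor over time (the function name and the seconds-based units are illustrative only - the real code further down works in frames):

def blend_alpha(current_s, switch_s, blending_start=10, blending_duration=3.0):
    # 0.0 = still the old image, 1.0 = fully the new image
    start_s = switch_s - blending_start  # the fade begins this many seconds before the timestamp
    return max(0.0, min(1.0, (current_s - start_s) / blending_duration))

print(blend_alpha(310, 322))    # 0.0 - still the old image
print(blend_alpha(313.5, 322))  # 0.5 - halfway through the fade
print(blend_alpha(316, 322))    # 1.0 - fully switched over

So an image switch timed at 322 seconds starts fading at 312 seconds and is fully blended by 315 seconds.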


Process

Initialization

So to start off with, we're going to need some data to work with.
Since it's Python, you can do whatever you want to feed data in - you could use a JSON file, CSV, Pickle, whatever you're comfortable with, or perhaps whatever arbitrary file format you're locked into using.
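For instance, if the same fields lived in a JSON file, loading them could be as simple as this (a hypothetical "songdata.json" holding the same timestamp/artist/title/label/image fields is assumed):

import json

with open("songdata.json") as f:
    songData = json.load(f)  # e.g. [[390, "Fractal", "Itvara", "minimix", "image1.jpg"], ...]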

But here, a basic Python list will be used to indicate the timings and image files that will be fed into the slideshow, amongst other data...
songData = [
    [390, u'Fractal', u'Itvara', 'minimix', u'image1.jpg'],
    [322, u'Case & Point', u'Error Code', 'minimix', u'image2.jpg'],
    [261, u'Excision & Pegboard Nerds', u'Bring the Madness (Noisestorm Remix) [feat. Mayor Apeshit]', 'minimix', u'image3.jpg'],
    [157, u'Nitro Fun', u'Final Boss', 'minimix', u'image4.jpg'],
    [88, u'Astronaut', u'Quantum (Virtual Riot Remix)', 'minimix', u'image5.jpg'],
    [0, u'Fractal', u'Contact', 'minimix', u'image6.jpg']]

As you can see in the data above, the most relevant fields are songData[i][0] and songData[i][4], indicating the timings (in seconds) and the image file locations, respectively.

We're going to set the FPS of the slideshow... 60 FPS is the standard nowadays, so we're going to set that and process the songData above to reflect it...
FPS = 60 # Sets the FPS of the entire video
currentFrame = 0 # The animation hasn't moved yet, so we're going to leave it as zero
startFrame = 0 # The frame at which the transition to the "next" image starts
trailingSeconds = 5 # Sets the amount of time we give our last image (in seconds)
blendingDuration = 3.0 # Sets the amount of time that each transition should last for
                       # This could be more dynamic, but for now, a constant transition period is chosen
blendingStart = 10 # Sets how many seconds before each song's timestamp the blending starts

for i in songData:
    i[0] = i[0] * FPS # Makes it so that iterating frame-by-frame will result in properly timed slideshows

Now the first image is going to be loaded in by the script, like so:
im1 = Image.open(songData[-1][4]) # Load the image in
im2 = im1 # Define a second image to force a global variable to be created

current = songData[-1][4] # Let the script know the location of the current image
previous = current # And this is to force/declare a global variable

And next up is to create the actual OpenCV video-handling capability. You can read up about this here: Python: PIL to mp4
height, width, layers = np.array(im1).shape # Get some stats on the image file to create the video with
video = cv2.VideoWriter("slideshow.avi",-1,60,(width,height),True)
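A quick note on that "-1": on Windows it asks for a codec through a pop-up dialog. On other platforms (or if you'd rather not click through a dialog) you may need to name a codec explicitly - here's a sketch of that alternative, reusing the width/height from above (XVID is just one common choice, and the helper's name depends on your OpenCV version):

try:
    fourcc = cv2.cv.CV_FOURCC(*"XVID")        # OpenCV 2.x
except AttributeError:
    fourcc = cv2.VideoWriter_fourcc(*"XVID")  # OpenCV 3+
video = cv2.VideoWriter("slideshow.avi", fourcc, FPS, (width, height), True)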

So that was the basic initialization routine. If you don't get how it all works together yet, don't worry - just read on, as the full code with everything combined is below.

Main loop

So the strategy behind generating this slideshow is to loop through each and every frame and continuously feed it into our output video file. Sure, some corners could be cut by only generating the transitions (leaving the gaps to be filled in manually with an external program), but this post is about automating the entire slideshow generation process with only Python, PIL and OpenCV.

We're going to have a main while loop that sets the limit on how long our slideshow should last.
while currentFrame < songData[0][0] + FPS * 60 * trailingSeconds: # RHS defines the limit of the slideshow

And this is where the nitty-gritty kicks in: the actual code that makes the transition between each image within the slideshow...
    for i in songData: # Loop through each image timing
        if currentFrame >= i[0] - (blendingStart * FPS): # If this entry's timing has been
                                                         # reached, then it's the current image...
                                                         # (Notice how songData is reversed)
                                                         
            # The print statement adds some verbosity to the program
            print str(currentFrame) + " - " + str(i[0] - (blendingStart * FPS)) + " - " + i[2]
            if current != i[4]: # Check if the image file has changed
                previous = current # We'd want the transition to start if the file has changed
                current = i[4]
                startFrame = i[0] - (blendingStart * FPS)

                # The two images in question for the blending are loaded in
                im1 = Image.open(previous)
                im2 = Image.open(current)
            break

    # See: http://blog.extramaster.net/2015/07/python-pil-to-mp4.html for the part below
    diff = Image.blend(im1, im2, min(1.0, (currentFrame - startFrame) / float(FPS) / blendingDuration))
    video.write(cv2.cvtColor(np.array(diff), cv2.COLOR_RGB2BGR))
    
    currentFrame += 1 # Next frame
The ending to this program is pretty self-explanatory...
# At this point, we'll assume that the slideshow has completed generating, and we want to close everything off to prevent a corrupted output.
video.release()


All together now!

So here's all the code required to create a timed image slideshow with PIL and OpenCV v2!

Code:
from PIL import Image
import cv2
import numpy as np

songData = [
    [390, u'Fractal', u'Itvara', 'minimix', u'image1.jpg'],
    [322, u'Case & Point', u'Error Code', 'minimix', u'image2.jpg'],
    [261, u'Excision & Pegboard Nerds', u'Bring the Madness (Noisestorm Remix) [feat. Mayor Apeshit]', 'minimix', u'image3.jpg'],
    [157, u'Nitro Fun', u'Final Boss', 'minimix', u'image4.jpg'],
    [88, u'Astronaut', u'Quantum (Virtual Riot Remix)', 'minimix', u'image5.jpg'],
    [0, u'Fractal', u'Contact', 'minimix', u'image6.jpg']]

FPS = 60 # Sets the FPS of the entire video
currentFrame = 0 # The animation hasn't moved yet, so we're going to leave it as zero
startFrame = 0 # The frame at which the transition to the "next" image starts
trailingSeconds = 5 # Sets the amount of time we give our last image (in seconds)
blendingDuration = 3.0 # Sets the amount of time that each transition should last for
                       # This could be more dynamic, but for now, a constant transition period is chosen
blendingStart = 10 # Sets how many seconds before each song's timestamp the blending starts

for i in songData:
    i[0] = i[0] * FPS # Makes it so that iterating frame-by-frame will result in properly timed slideshows

im1 = Image.open(songData[-1][4]) # Load the image in
im2 = im1 # Define a second image to force a global variable to be created

current = songData[-1][4] # Let the script know the location of the current image
previous = current # And this is to force/declare a global variable

height, width, layers = np.array(im1).shape # Get some stats on the image file to create the video with
video = cv2.VideoWriter("slideshow.avi",-1,60,(width,height),True)

while currentFrame < songData[0][0] + FPS * 60 * trailingSeconds: # RHS defines the limit of the slideshow
    for i in songData: # Loop through each image timing
        if currentFrame >= i[0] - (blendingStart * FPS): # If this entry's timing has been
                                                         # reached, then it's the current image...
                                                         # (Notice how songData is reversed)
                                                         
            # The print statement adds some verbosity to the program
            print str(currentFrame) + " - " + str(i[0] - (blendingStart * FPS)) + " - " + i[2]
            if current != i[4]: # Check if the image file has changed
                previous = current # We'd want the transition to start if the file has changed
                current = i[4]
                startFrame = i[0] - (blendingStart * FPS)

                # The two images in question for the blending are loaded in
                im1 = Image.open(previous)
                im2 = Image.open(current)
            break

    # See: http://blog.extramaster.net/2015/07/python-pil-to-mp4.html for the part below
    diff = Image.blend(im1, im2, min(1.0, (currentFrame - startFrame) / float(FPS) / blendingDuration))
    video.write(cv2.cvtColor(np.array(diff), cv2.COLOR_RGB2BGR))
    
    currentFrame += 1 # Next frame

# At this point, we'll assume that the slideshow has completed generating, and we want to close everything off to prevent a corrupted output.
video.release()



Sample output

So with all the code above, it raises the question: why would I need to create a slideshow using scripts?
Well, here's a little sample of what you can do with a simple slideshow.
Note the timings from "songData":
songData = [
    [390, u'Fractal', u'Itvara', '6:30', u'image1.jpg'],
    [322, u'Case & Point', u'Error Code', '6:22', u'image2.jpg'],
    [261, u'Excision & Pegboard Nerds', u'Bring the Madness (Noisestorm Remix) [feat. Mayor Apeshit]', '4:21', u'image3.jpg'],
    [157, u'Nitro Fun', u'Final Boss', '2:37', u'image4.jpg'],
    [88, u'Astronaut', u'Quantum (Virtual Riot Remix)', '1:28', u'image5.jpg'],
    [0, u'Fractal', u'Contact', 0, u'image6.jpg']]
With this slideshow, you can really enhance the effect of audio-react "music" YouTube videos, especially YouTube music mixes, like this:
Direct Link: https://www.youtube.com/watch?v=XI25k5Z-t88