# encoding: utf-8
# ruby: 2.1.0
=begin
Rakefile to manage hardware projects
uses Lepton EDA for schematics and pcb-rnd for board layouts
Rakefile instead of Makefile for better text file parsing capabilities
=end
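# usage: `rake` builds the prints, fabrication archive, and BOM (the :default task below);
# `rake library`, `rake netlist`, `rake notes`, and `rake cost` run the individual steps;
# `rake clean`/`rake clobber` (from rake/clean) remove intermediate/generated files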
require 'rake/clean'
require 'csv' # to export BOM and costs
require 'open-uri' # to parse URLs
require 'open_uri_redirections' # allow redirections
require 'net/http' # to get sites
require 'nokogiri' # to parse sites
require 'json' # to parse GET site responses
# =================
# project variables
# =================
# common name used for file names
name = "usb_cable_tester"
# project version, read from "version" file
raise "define project version in 'version' file" unless File.exist? "version"
version = IO.read("version").split("\n")[0]
# current date for stamping output
date = Time.now.strftime("%Y-%m-%d")
# revision based on number of changes on schematic or board layout and current git commit
changes = `git log --pretty=oneline "#{name}.sch" "#{name}.lht" | wc -l`.chomp.to_i
commit = `git rev-parse --short HEAD`.chomp
revision = "#{changes} (#{commit})"
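# e.g. revision could read "42 (1a2b3c4)" (hypothetical values)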
# local QEDA parts
parts_local = []
parts_local << "mcu/ic_mcu_st_stm32f103xc@LQFP144"
parts_local << "connector/connector_usb-a-3.0_fci_10117835"
parts_local << "connector/connector_usb-b-3.0_amphenol_gsb4211"
parts_local << "connector/connector_usb-mini-b_edac_690-005-299-043"
parts_local << "connector/connector_usb-micro-b-3.0_gct_usb3110"
parts_local << "connector/connector_usb-micro-b-2.0_ali_32910334970"
parts_local << "connector/connector_usb-c_hro_type-c-31-m-04"
parts_local << "connector/connector_lightning_iphone5"
parts_local << "vreg/vreg_ldo_torex_xc6206"
parts_local << "display/display_lcd_lcm1602"
parts_local << "display/display_lcd_i2cadapter"
parts_local << "display/display_oled_0.96in"
parts_local << "vreg/vreg_pmic_tpower_tp4056"
parts_local << "transistor/transistor_pmos_nxp_bss84"
parts_local << "mechanical/mechanical_button_6mm"
parts_local << "oscillator/hc-49@SM"
parts_local << "resistor/trimpot_3mm"
parts_local << "diode/diode_tvs_st_usblc6-2@SC6"
parts_local << "connector/connector_molex_530470210"
parts_local << "mcu/ic_mcu_st_stm8s003x3@P"
# the corresponding files
library = parts_local.collect { |part| "library/#{part.split('@')[0].downcase}.yaml" }
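# e.g. "mcu/ic_mcu_st_stm32f103xc@LQFP144" maps to "library/mcu/ic_mcu_st_stm32f103xc.yaml"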
# github QEDA parts
parts_github = []
parts_github << "capacitor/c0603"
parts_github << "resistor/r0603"
parts_github << "te/1206sfh"
# path to qeda
qeda = "qeda"
# ==========
# main tasks
# ==========
desc " main building task "
2019-12-17 16:02:10 +01:00
task :default = > [ :print , :fabrication , :bom ]
desc " print schematic and layout (as pdf) "
prints = [ " #{ name } .sch.pdf " , " #{ name } .brd.ps " , " #{ name } .brd-top.png " , " #{ name } .brd-bottom.png " ]
task :print = > prints
CLEAN.include(["#{name}.versioned.sch", "#{name}.versioned.lht"])
CLOBBER.include(prints)
desc "generate fabrication gerbers (as archive)"
gerbers = ["#{name}.brd.asb", "#{name}.brd.ast", "#{name}.brd.gbl", "#{name}.brd.gbo", "#{name}.brd.gbp", "#{name}.brd.gbs", "#{name}.brd.gko", "#{name}.brd.gtl", "#{name}.brd.gto", "#{name}.brd.gtp", "#{name}.brd.gts", "#{name}.brd.xln"]
fab = ["#{name}.brd.zip"]
task :fabrication => fab
CLEAN.include(gerbers)
CLOBBER.include(fab)
desc " generate footprints from parts "
task :library = > library do
# reset
sh " #{ qeda } reset "
# configure
sh " #{ qeda } config pattern.preferManufacturer false "
2020-02-03 17:38:49 +01:00
sh " #{ qeda } config pattern.densityLevel N "
sh " #{ qeda } config pattern.smoothPadCorners true "
# add local files
parts_local . each do | part |
sh " #{ qeda } add #{ part } "
end
# from github library
parts_github . each do | part |
sh " #{ qeda } add #{ part } "
end
# generate outputs
sh " #{ qeda } config output geda "
sh " #{ qeda } generate . "
sh " #{ qeda } config output coraleda "
sh " #{ qeda } generate . "
end
desc " export netlist from schematic "
net = [ " #{ name } .tdx " ]
task :netlist = > net
CLOBBER . include ( net )
desc " export notes from schematic "
notes = [ " #{ name } .notes.txt " ]
task :notes = > notes
CLOBBER . include ( notes )
desc " export BOMs from schematic "
boms = [ " #{ name } .bom.csv " ]
task :bom = > boms
CLOBBER . include ( boms )
desc " generate cost estimte "
costs = [ " #{ name } .cost.csv " ]
task :cost = > costs
CLOBBER . include ( costs )
# ===============
# file generation
# ===============
desc " generate schematic with version information all symbols embedded "
rule " .versioned.sch " = > " .sch " do | t |
2019-12-17 09:54:15 +01:00
sh " cp #{ t . source } #{ t . name } "
sh " lepton-embed --embed #{ t . name } 2> /dev/null "
sh " sed --in-place 's/ \\ $version \\ $/ #{ version } /' #{ t . name } "
sh " sed --in-place 's/ \\ $date \\ $/ #{ date } /' #{ t . name } "
sh " sed --in-place 's/ \\ $revision \\ $/ #{ revision } /' #{ t . name } "
end
desc " generate board layout with version information "
rule " .versioned.lht " = > " .lht " do | t |
sh " cp #{ t . source } #{ t . name } "
sh " sed --in-place 's/ \\ $version \\ $/ #{ version } /' #{ t . name } "
sh " sed --in-place 's/ \\ $date \\ $/ #{ date } /' #{ t . name } "
sh " sed --in-place 's/ \\ $revision \\ $/ #{ revision } /' #{ t . name } "
end
desc " generate printable version (PDF) of schematic "
2019-12-17 09:54:15 +01:00
rule " .sch.pdf " = > " .versioned.sch " do | t |
2019-12-16 18:09:44 +01:00
sh " lepton-cli export --color --paper iso_a4 --layout landscape --color --output #{ t . name } #{ t . source } 2> /dev/null "
end
desc " generate printable version (PostScript) of board layout "
2019-12-17 09:54:15 +01:00
rule " .brd.ps " = > " .versioned.lht " do | t |
2019-12-16 18:09:44 +01:00
sh " pcb-rnd -x ps --psfile #{ t . name } #{ t . source } 2> /dev/null "
end
desc " generate photo realistic picture from layout (top side) "
2019-12-17 09:54:15 +01:00
rule " .brd-top.png " = > " .versioned.lht " do | t |
2019-12-16 18:09:44 +01:00
sh " pcb-rnd -x png --dpi 1200 --photo-mode --outfile #{ t . name } #{ t . source } 2> /dev/null "
end
desc " generate photo realistic picture from layout (bottom side) "
2019-12-17 09:54:15 +01:00
rule " .brd-bottom.png " = > " .versioned.lht " do | t |
2019-12-16 18:09:44 +01:00
sh " pcb-rnd -x png --dpi 1200 --photo-mode --photo-flip-y --outfile #{ t . name } #{ t . source } 2> /dev/null "
end
desc " archive gerbers "
2019-12-17 16:02:10 +01:00
rule " .brd.zip " = > " .versioned.lht " do | t |
2020-01-12 16:55:35 +01:00
base = File . basename ( t . source , " .versioned.lht " )
puts base
2019-12-16 18:09:44 +01:00
sh " pcb-rnd -x cam gerber:JLC_PCB --outfile #{ base } .brd #{ t . source } 2> /dev/null "
gerbers = [ " #{ base } .brd.asb " , " #{ base } .brd.ast " , " #{ base } .brd.gbl " , " #{ base } .brd.gbo " , " #{ base } .brd.gbp " , " #{ base } .brd.gbs " , " #{ base } .brd.gko " , " #{ base } .brd.gtl " , " #{ base } .brd.gto " , " #{ base } .brd.gtp " , " #{ base } .brd.gts " , " #{ base } .brd.xln " ]
fab = [ " #{ name } .brd.zip " ]
sh " zip --quiet #{ t . name } #{ gerbers . join ( ' ' ) } "
end
desc " export netlist from schematic "
rule " .tdx " = > " .sch " do | t |
sh " lepton-netlist -g tEDAx -o #{ t . name } #{ t . source } 2> /dev/null "
end
desc " generate note file from schematic, listing the 'note' attributes from elements "
rule " .notes.txt " = > " .sch " do | t |
notes_data = bom2 ( t . prerequisites [ 0 ] , [ " note " , " value " ] )
File . open ( t . name , " w " ) do | notes_file |
notes_data . each do | note |
next unless note [ 'note' ]
note [ 'note' ] = note [ 'note' ] . gsub ( '. ' , " . \n " ) . gsub ( / \ n+$ / , '' )
notes_file . puts " #{ note [ 'value' ] } ( #{ note [ 'refdes' ] } ): \n #{ note [ 'note' ] } \n \n "
end
end
end
desc " generate BOM file from schematic "
rule " .bom.csv " = > " .sch " do | t |
attributes = [ " category " , " device " , " value " , " description " , " manufacturer " , " manufacturer-id " , " datasheet " , " lcsc-id " , " aliexpress-id " , " alternatives " ]
bom_data = bom2 ( t . prerequisites [ 0 ] , attributes )
CSV . open ( t . name , " wb " ) do | csv |
all_attributes = [ " refdes " , " qty " ] + attributes
csv << all_attributes
bom_data . each do | line |
csv << all_attributes . collect { | attribute | line [ attribute ] }
end
end
end
desc " generate cost estimate from schematic "
# this version uses Digi-Key, AliExpress, and LCSC
# Digi-Key is easily scrapable, while Mouser isn't
# Digi-Key is only one distributor, but the end prices across distributor is often similar
rule " .cost.csv " = > " .sch " do | t |
puts " scraping distributor sites to get prices. this may take some time "
sellers = [ 'digikey-id' , 'aliexpress-id' , 'lcsc-id' ] # get seller SKU
boards = [ 1 , 10 , 100 ] # calculate the price for as many boards
total_price = Array . new ( sellers . size ) { Array . new ( boards . size , 0 . 0 ) } # total price for x boards
unit_price = Array . new ( sellers . size ) { Array . new ( boards . size , 0 . 0 ) } # unit price for 1 board
# get component information
attributes = [ " value " , " manufacturer " , " manufacturer-id " ] + sellers # BOM fields to get
parts = bom2 ( t . prerequisites [ 0 ] , attributes ) # get field values
# put result in CVS
CSV . open ( t . name , " wb " ) do | csv |
csv << [ " refdes " , " quantity " , " manufacturer " , " part number " ] + ( sellers . collect { | seller | [ seller , " stock " ] + boards . collect { | qty | [ " unit price for #{ qty } board(s) " , " total price for #{ qty } board(s) " ] } } ) . flatten
parts . each do | part |
part [ 'qty' ] = part [ 'qty' ] . to_i # converted quantity from BOM string to integer for later calculation
line = [ part [ 'refdes' ] , part [ 'qty' ] , part [ 'manufacturer' ] , part [ 'manufacturer-id' ] ] # start CSV line
sellers . each_index do | seller_i | # go through all seller
seller = sellers [ seller_i ] # current seller
if part [ seller ] and ! part [ seller ] . empty? then
line << part [ seller ]
price = case seller
when 'aliexpress-id'
scrape_aliexpress ( part [ seller ] )
when 'digikey-id'
nil
when 'lcsc-id'
scrape_lcsc ( part [ seller ] )
else
nil
end
if price then
line << price [ :stock ]
boards . each_index do | boards_i |
quantity = boards [ boards_i ]
              # find the lowest price (considering the required quantity and the price breaks)
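              # e.g. with hypothetical price breaks [[10, 0.05], [100, 0.04]] and quantity 30:
              # max(30, 10) * 0.05 = 1.50 is cheaper than max(30, 100) * 0.04 = 4.00, so unit = 0.05 and total = 1.50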
              unit = nil
              total = nil
              price[:prices].each do |p|
                if !unit or !total then
                  unit = p[1].to_f
                  total = [quantity, p[0].to_i].max * unit
                end
                if [quantity, p[0].to_i].max * p[1].to_f < total then
                  unit = p[1].to_f
                  total = [quantity, p[0].to_i].max * unit
                end
              end
              if "USD" == price[:currency] then
                unit = usd2eur(unit)
                total = usd2eur(total)
              end
              line << unit
              unit_price[seller_i][boards_i] += line[-1]
              line << total
              total_price[seller_i][boards_i] += line[-1]
            end
          else
            line += [nil] * (1 + boards.size * 2)
          end
        else
          line += [nil] * (2 + boards.size * 2)
        end # seller
      end # sellers
      csv << line
    end # parts
    # summary
    line = [nil] * 4
    sellers.each_index do |seller_i|
      line += [nil, nil]
      boards.each_index do |boards_i|
        line << unit_price[seller_i][boards_i]
        line << total_price[seller_i][boards_i]
      end
    end
    csv << line
    # details
    csv << []
    csv << ["all prices and stocks have been retrieved from Digikey, AliExpress, and LCSC on #{Time.now.to_s}"]
    csv << ["all prices are in EUR. prices originally in USD have been converted at a rate of #{usd2eur(1.0)}"]
  end # CSV file
end # end cost file
# ================
# helper functions
# ================
# generate a bom2 listing with lepton-netlist and parse it
# arguments: schematic=schematic to use, attributes=attributes to use for generating the bom2
# returns an array of hashes. key is the attribute name, value is the attribute value
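# hypothetical example: bom2("board.sch", ["value", "device"]) could return
#   [{"refdes" => "R1, R2", "qty" => "2", "value" => "10k", "device" => "RESISTOR"}, ...]
# (the exact refdes/qty formatting depends on the lepton-netlist bom2 backend)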
def bom2(schematic, attributes)
  to_return = []
  # force attributes to be an array
  attributes = case attributes
  when String
    [attributes]
  when Array
    attributes
  else
    [attributes.to_s]
  end
  # generate bom2
  list = `lepton-netlist --backend bom2 --backend-option attribs=#{attributes * ','} --quiet --output - #{schematic} 2> /dev/null`
  # quote each line so the CSV parser below does not split on the ':' inside values (such as URLs)
  list.gsub!(/^(.+)/, '"\1') # opening quote at the start of each line
  list.gsub!(/(.+)$/, '\1"') # closing quote at the end of each line
  list.gsub!(/:(?!\/\/)/, '":"') # quote the ':' separators, but leave the '://' of URLs alone
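  # e.g. the (hypothetical) bom2 line 'C1:1:https://example.com' becomes '"C1":"1":"https://example.com"'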
  # parse bom2
  csv = CSV.parse(list, col_sep: ':', quote_char: '"')
  csv[1..-1].each do |row|
    line = {}
    row.each_index do |col|
      line[csv[0][col]] = row[col] unless row[col] == "unknown"
    end
    to_return << line
  end
  return to_return
end
# convert USD $ value to EUR €
def usd2eur(usd)
  return usd / eur2usd(1.0)
end
# convert EUR € value to USD $
def eur2usd(eur)
  # get the rate if we don't have it already
  unless $eur2usd then
    url = "https://www.ecb.europa.eu/stats/eurofxref/eurofxref-daily.xml"
    doc = Nokogiri::HTML(open(URI.escape(url), :allow_redirections => :all))
    $eur2usd = doc.xpath('//cube[@currency="USD"]')[0].attr('rate').to_f
  end
  return eur * $eur2usd
end
# get prices from LCSC using SKU
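# hypothetical return value: { stock: "1234", currency: "USD", prices: [[10, 0.05], [100, 0.04]] }
# where prices is a list of [minimum quantity, unit price] pairs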
def scrape_lcsc(sku)
  to_return = { stock: nil, currency: nil, prices: nil } # information to return (stock, currency, price breaks)
  # get page
  # the search page does not always list existing parts; this pre_search link redirects straight to the product page instead
  url = "https://lcsc.com/pre_search/link?type=lcsc&&value=#{sku}"
  puts "scraping #{url}" if $scrape_debug
  doc = Nokogiri::HTML(URI.open(url, :allow_redirections => :all))
  # verify if we have got a product page
  if doc.xpath('//div[@id="product_details"]').empty? then
    puts "no product details:\n#{doc}" if $scrape_debug
    return nil
  end
  doc.xpath('//div[@id="product_details"]//div[contains(@class,"stock-number")]').each do |element|
    next unless element["data-stock"]
    to_return[:stock] = element["data-stock"]
  end
  to_return[:currency] = "USD" # we could verify in the price, but I'm lazy
  doc.xpath('//input[contains(@class,"salam-price")]').each do |element|
    next unless element["data-price"]
    to_return[:prices] = []
    element["data-price"].split("],[").each do |price|
      price.gsub!("[", "")
      price.gsub!("]", "")
      price = price.split(",")
      to_return[:prices] << [price[0].to_i, price[1].to_f]
    end
  end
  return to_return
end
# get prices from AliExpress using SKU
def scrape_aliexpress(sku)
  to_return = { stock: nil, currency: nil, prices: nil } # information to return (stock, currency, price breaks)
  # get page
  url = "https://www.aliexpress.com/item/#{sku}.html"
  puts "scraping #{url}" if $scrape_debug
  doc = Nokogiri::HTML(URI.open(url, :allow_redirections => :all))
  # all the values can be found in javascript variables (the stock is in fact only available there)
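  # the page typically embeds something like (simplified, hypothetical excerpt):
  #   window.runParams = { data: {"priceModule": {...}, "quantityModule": {...}}, csrfToken: "..." };
  # the code below extracts the JSON object between 'data: ' and 'csrfToken: '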
  js_docs = doc.xpath('//script')
  if js_docs.empty? then
    puts "script not found:\n#{doc}" if $scrape_debug
    return nil
  end
  js_json = nil
  js_docs.each do |js_doc|
    js_text = js_doc.text
    next unless js_text.include?("window.runParams = {")
    js_var = js_text.split('data: ')[1].split('csrfToken: ')[0].gsub(/,[\w\n]*$/, '')
    js_json = JSON.parse(js_var)
  end
  unless js_json and js_json["priceModule"] then
    puts "priceModule not found:\n#{js_json}" if $scrape_debug
    return nil
  end
  # get currency
  unless js_json["priceModule"]["formatedPrice"] then
    puts "currency not found:\n#{js_json['priceModule']}" if $scrape_debug
    return nil
  end
  if js_json["priceModule"]["formatedPrice"].start_with? "US" then
    to_return[:currency] = "USD"
  elsif js_json["priceModule"]["formatedPrice"].start_with? "EU" then
    to_return[:currency] = "EUR"
  end
  # get quantity
  unless js_json["quantityModule"] and js_json["quantityModule"]["totalAvailQuantity"] then
    puts "quantityModule not found:\n#{js_json}" if $scrape_debug
    return nil
  end
  to_return[:stock] = js_json["quantityModule"]["totalAvailQuantity"]
  # get price
  unless js_json["priceModule"]["numberPerLot"] and (js_json["priceModule"]["formatedActivityPrice"] or js_json["priceModule"]["formatedPrice"]) then
    puts "priceModule malformatted:\n#{js_json['priceModule']}" if $scrape_debug
    return nil
  end
  to_return[:prices] = []
  lot = js_json["priceModule"]["numberPerLot"].to_i
  price = js_json["priceModule"]["formatedActivityPrice"] || js_json["priceModule"]["formatedPrice"]
  unless price then
    puts "priceModule malformatted:\n#{js_json['priceModule']}" if $scrape_debug
    return nil
  end
  price = price.split('$')[1].to_f
  to_return[:prices] << [lot, price / lot]
  return to_return
end