Cmuelle8
Lonpos - Yesss solver in JavaScript
The program computes 32,288 unique solutions when started from an empty board. With the optimizations introduced since v0.9.4 this takes less than 10,000 s on a modern machine, tested with Firefox 44.
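For reference, this is one way to drive the solver from the browser console once the code below has been pasted - a minimal sketch that only calls the entry points already defined in the script (run() and doc.prompt()), nothing beyond them:
// after pasting the solver source into the console:
doc.prompt();   // builds the clickable input board (also called automatically at the end of the script)
run(0, false);  // "Solve!" - stop at the first solution for the current board
run(0, true);   // "Find all!" - enumerate every solution (32,288 from an empty board)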
Code
/* Solver for LONPOS Crazy Cone, Yesss Logic Game Puzzle
v0.9.4 Mon, 2016-03-21, code rewrite
>RUN: * open Firefox (or any JS-capable browser)
`---- * type about:blank in the URL field
* hit Shift+F4 to open a JavaScript Console
* copy+paste this code, hit run
> OR: * embed this code in an empty html page
`---- * call doc.prompt() as an onload event
>USE: * Repeatedly click to enter tiles
`---- * Hit "Solve!" to find a solution
>This code is licensed PD (public domain):
`-----------------------------------------
* https://en.wikipedia.org/wiki/Public-domain_software
>More general solutions to exact cover problems:
`-----------------------------------------------
* https://en.wikipedia.org/wiki/Dancing_Links
* https://en.wikipedia.org/wiki/Exact_cover
>Implementation notes:
`---------------------
The board spots are represented by an array of an array
of chars, representing ten rows with columns increasing
from 1 in first row, to 10 in the last.
For each tile an anchor sphere is picked, mov'able to a
board spot (r, c). Connected spheres are coded relative
to (r, c) for each direction (x2 if flipside differs) a
tile may be put on board. movX(s..) set X into board s.
slv() queues its recursive invocation on nesting levels
above 7. Below it does a depth first search. Queue q is
processed using setTimeout() consecutively.
*/
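/* Example (illustrative only): ini() builds the empty board as
s[r]=Array(r+2).join("_").match(/./g), i.e. s[0]=["_"],
s[1]=["_","_"], ... up to ten "_" entries in s[9].
A tile routine such as movL(s,sf,r,c,"N","F") writes its four
spots relative to the anchor (r,c); calling it again with
(r-64,c-64) makes _mov() take that tile off the board again. */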
var q=[]; // queue
function run(job,all,debug) {
if (typeof all==='undefined') all=false;
if (typeof debug==='undefined') debug=false;
var s=Array(10),sf={_:55},r,c,cost={},movs=[
movL,movA,
movU,
movW,movY,movS,movP,
movB,movD,movO,movG,movR,
];
doc.reset();
if (ini(s,sf,job)) {
// remove user configured tiles from movs
for (r=0;r<10;r++) for (c=0;c<=r;c++) sf[window["mov"+s[r][c]]]=1;
movs=movs.filter(function(e) { return !(e in sf); });
movs.forEach(function(e) { cost[e]=tst(s,sf,[e],true); });
movs.sort(function(a,b) { return cost[b]-cost[a]; });
}
debug?tst(s,sf,movs):slv(s,sf,movs,all);
if (q.length) doc.tq=setTimeout(q.shift(),0);
else doc.dump();
}
function prt_html(s,stats) {
var ret=[],r,c,i;
function fillspace(n) {
var stat=stats.pop();
if (typeof stat==='undefined') stat=" ";
ret.push('<td class="gap" '+(n>1?'colspan="'+n+'"':'')+'>'+stat+'</td>');
}
ret.push('<div><table>');
for (r=0,i=0;r<10;r++) {
ret.push('\n <tr>');
if (r<9) fillspace((19-((r+1)*2-1))/2);
for (c=0;c<=r;c++,i++) {
ret.push('<td tabindex="0" class="'+s[i]+'">'+s[i]/*s[r][c]*/+'</td>');
if (c<r) fillspace(1);
}
if (r<9) fillspace((19-((r+1)*2-1))/2);
ret.push('</tr>');
}
ret.push('</table></div>');
return ret.join("");
}
function prt(s) {
var ret=[],r,c;
for (r=0;r<10;r++) {
ret.push(Array(20-r*2).join(" "));
for (c=0;c<=r;c++) {
ret.push(s[r][c]+" ");
}
ret.push("\n");
}
return ret.join("");
}
function ini(s,sf,job) {
var valid=true,tds,r,c,i;
for (r=0;r<10;r++) s[r]=Array(r+2).join("_").match(/./g);
switch(job) {
case 59:
valid=movL(s,sf,1,0,"N","F")&&movG(s,sf,3,2,"W","F")&&
movU(s,sf,3,3,"E","F")&&movO(s,sf,6,5,"W","B")&&
movB(s,sf,6,6,"W","B");
break;
case 102:
valid=movL(s,sf,0,0,"N","F")&&movB(s,sf,2,2,"N","F");
break;
default:
tds=Array.prototype.filter.call(
document.querySelectorAll("div#input div table tbody tr td"),
function(e) { return e.className!="gap"; }
);
for (r=0,i=0;r<10;r++)
for (c=0;c<=r;c++)
if ("_"!=(s[r][c]=tds[i++].innerHTML.trim()))
sf._--;
}
return valid;
}
function slv_sched(s,sf,m,all) {
if (m.length>7) {
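// deep-copy board, free-spot counter and tile list: the queued closure runs later via setTimeout, after the calling depth-first search has already modified them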
var t=Array(10),r;
for (r=0;r<10;r++) t[r]=s[r].slice();
s=t,sf={_:sf._},m=m.slice();
q.push(function() {
if (slv(s,sf,m,all)) q=[];
else doc.dump(false);
doc.tq=setTimeout(q.length?q.shift():doc.dump.bind(doc),0);
});
} else return slv(s,sf,m,all);
}
var ds={N:1,E:1,S:1,W:1},fs={F:1,B:1};
function slv(s,sf,m,all) {
m=m.slice(); var r,c,d,f,mov=m.pop();
for (r=0;r<10;r++)
for (c=0;c<=r;c++)
for (d in ds)
for (f in fs)
if (mov(s,sf,r,c,d,f)) {
if (m.length==0) { doc.addsol(s); if (!all) return true; }
else if (slv_sched(s,sf,m,all)) return true;
mov(s,sf,r-64,c-64,d,f); // take tile off board
}
return false;
}
function tst(s,sf,m,count) {
if (typeof count==='undefined') count=false;
var r,c,d,f,p,n=0;
for (p=0;p<m.length;p++)
for (r=0;r<10;r++)
for (c=0;c<=r;c++)
for (d in ds)
for (f in fs)
if (m[p](s,sf,r,c,d,f))
count?n++:doc.addsol(s,p),m[p](s,sf,r-64,c-64,d,f);
return n;
}
/* tiles with four axes of symmetry */
function movL(s,sf,r,c,d,f) { // limegreen
switch(f) {
case "F":
switch(d) {
case "N": return _mov(s,sf,"L",[
[r ,c ],[r+1,c ],
[r+1,c+1],[r+2,c+1]]);
}
break;
}
return false;
}
function movA(s,sf,r,c,d,f) { // gray
switch(f) {
case "F":
switch(d) {
case "N": return _mov(s,sf,"A",[
[r ,c ],[r ,c+1],
[r+1,c+1],
[r+2,c+1],[r+2,c+2]]);
}
break;
}
return false;
}
/* tiles with two axes of symmetry */
function movU(s,sf,r,c,d,f) { // purple
switch(f) {
case "F":
switch(d) {
case "N": return _mov(s,sf,"U",[
[r ,c],
[r+1,c],
[r+2,c],
[r+3,c]]);
case "E": return _mov(s,sf,"U",[
[r-3,c-3],[r-2,c-2],[r-1,c-1],[r,c]]);
}
break;
}
return false;
}
/* tiles with one axis of symmetry */
function movW(s,sf,r,c,d,f) { // white
switch(f) {
case "F":
switch(d) {
case "N": return _mov(s,sf,"W",[
[r ,c],
[r+1,c],[r+2,c+1]]);
case "E": return _mov(s,sf,"W",[
[r ,c ],
[r-1,c-1],[r,c-1]]);
case "S": return _mov(s,sf,"W",[
[r ,c],
[r-1,c],[r-2,c-1]]);
case "W": return _mov(s,sf,"W",[
[r ,c ],
[r+1,c+1],[r,c+1]]);
}
break;
}
return false;
}
function movY(s,sf,r,c,d,f) { // yellow
switch(f) {
case "F":
switch(d) {
case "N": return _mov(s,sf,"Y",[
[r ,c],[r+1,c+1],
[r+1,c],
[r+2,c],[r+3,c+1]]);
case "E": return _mov(s,sf,"Y",[
[r ,c ],[r+1,c ],
[r-1,c-1],
[r-2,c-2],[r-1,c-2]]);
case "S": return _mov(s,sf,"Y",[
[r ,c],[r-1,c-1],
[r-1,c],
[r-2,c],[r-3,c-1]]);
case "W": return _mov(s,sf,"Y",[
[r ,c ],[r-1,c ],
[r+1,c+1],
[r+2,c+2],[r+1,c+2]]);
}
break;
}
return false;
}
function movS(s,sf,r,c,d,f) { // skyblue
switch(f) {
case "F":
switch(d) {
case "N": return _mov(s,sf,"S",[
[r ,c ],[r+1,c+1],[r+2,c+2],
[r+1,c ],
[r+2,c ]]);
case "E": return _mov(s,sf,"S",[
[r ,c ],[r+1,c ],[r+2,c ],
[r-1,c-1],
[r-2,c-2]]);
case "S": return _mov(s,sf,"S",[
[r ,c ],[r-1,c-1],[r-2,c-2],
[r-1,c ],
[r-2,c ]]);
case "W": return _mov(s,sf,"S",[
[r ,c ],[r-1,c ],[r-2,c ],
[r+1,c+1],
[r+2,c+2]]);
}
break;
}
return false;
}
function movP(s,sf,r,c,d,f) { // pink
switch(f) {
case "F":
switch(d) {
case "N": return _mov(s,sf,"P",[
[r ,c],
[r+1,c],[r+2,c+1],
[r+3,c+1],[r+4,c+2]]);
case "E": return _mov(s,sf,"P",[
[r ,c ],
[r-1,c-1],[r,c-1],
[r-1,c-2],[r,c-2]]);
case "S": return _mov(s,sf,"P",[
[r ,c],
[r-1,c],[r-2,c-1],
[r-3,c-1],[r-4,c-2]]);
case "W": return _mov(s,sf,"P",[
[r ,c ],
[r+1,c+1],[r,c+1],
[r+1,c+2],[r,c+2]]);
}
break;
}
return false;
}
/* tiles without an axis of symmetry */
function movB(s,sf,r,c,d,f) { // babypink
switch(f) {
case "F":
switch(d) {
case "N": return _mov(s,sf,"B",[
[r ,c],
[r+1,c],
[r+2,c],[r+3,c+1],
[r+3,c]]);
case "E": return _mov(s,sf,"B",[
[r ,c ],
[r-1,c-1],
[r-2,c-2],[r-1,c-2],
[r-3,c-3]]);
case "S": return _mov(s,sf,"B",[
[r ,c],
[r-1,c],
[r-2,c],[r-3,c-1],
[r-3,c]]);
case "W": return _mov(s,sf,"B",[
[r ,c ],
[r+1,c+1],
[r+2,c+2],[r+1,c+2],
[r+3,c+3]]);
}
break;
case "B":
switch(d) {
case "N": return _mov(s,sf,"B",[
[r ,c],
[r+1,c],
[r+1,c-1],[r+2,c],
[r+3,c]]);
case "E": return _mov(s,sf,"B",[
[r ,c ],
[r-1,c-1],
[r-3,c-2],[r-2,c-2],
[r-3,c-3]]);
case "S": return _mov(s,sf,"B",[
[r ,c],
[r-1,c],
[r-1,c+1],[r-2,c],
[r-3,c]]);
case "W": return _mov(s,sf,"B",[
[r ,c ],
[r+1,c+1],
[r+3,c+2],[r+2,c+2],
[r+3,c+3]]);
}
break;
}
return false;
}
function movD(s,sf,r,c,d,f) { // darkblue
switch(f) {
case "F":
switch(d) {
case "N": return _mov(s,sf,"D",[
[r ,c],
[r+1,c],
[r+2,c],
[r+3,c],[r+4,c+1]]);
case "E": return _mov(s,sf,"D",[
[r ,c ],
[r-1,c-1],
[r-2,c-2],
[r-3,c-3],[r-2,c-3]]);
case "S": return _mov(s,sf,"D",[
[r ,c],
[r-1,c],
[r-2,c],
[r-3,c],[r-4,c-1]]);
case "W": return _mov(s,sf,"D",[
[r ,c ],
[r+1,c+1],
[r+2,c+2],
[r+3,c+3],[r+2,c+3]]);
}
break;
case "B":
switch(d) {
case "N": return _mov(s,sf,"D",[
[r ,c],
[r+1,c],
[r+2,c],
[r+2,c-1],[r+3,c]]);
case "E": return _mov(s,sf,"D",[
[r ,c ],
[r-1,c-1],
[r-2,c-2],
[r-4,c-3],[r-3,c-3]]);
case "S": return _mov(s,sf,"D",[
[r ,c],
[r-1,c],
[r-2,c],
[r-2,c+1],[r-3,c]]);
case "W": return _mov(s,sf,"D",[
[r ,c ],
[r+1,c+1],
[r+2,c+2],
[r+4,c+3],[r+3,c+3]]);
}
break;
}
return false;
}
function movO(s,sf,r,c,d,f) { // orange
switch(f) {
case "F":
switch(d) {
case "N": return _mov(s,sf,"O",[
[r ,c],
[r+1,c],
[r+2,c],[r+3,c+1]]);
case "E": return _mov(s,sf,"O",[
[r ,c ],
[r-1,c-1],
[r-2,c-2],[r-1,c-2]]);
case "S": return _mov(s,sf,"O",[
[r ,c],
[r-1,c],
[r-2,c],[r-3,c-1]]);
case "W": return _mov(s,sf,"O",[
[r ,c ],
[r+1,c+1],
[r+2,c+2],[r+1,c+2]]);
}
break;
case "B":
switch(d) {
case "N": return _mov(s,sf,"O",[
[r ,c],
[r+1,c],
[r+1,c-1],[r+2,c]]);
case "E": return _mov(s,sf,"O",[
[r ,c ],
[r-1,c-1],
[r-3,c-2],[r-2,c-2]]);
case "S": return _mov(s,sf,"O",[
[r ,c],
[r-1,c],
[r-1,c+1],[r-2,c]]);
case "W": return _mov(s,sf,"O",[
[r ,c ],
[r+1,c+1],
[r+3,c+2],[r+2,c+2]]);
}
break;
}
return false;
}
function movG(s,sf,r,c,d,f) { // green
switch(f) {
case "F":
switch(d) {
case "N": return _mov(s,sf,"G",[
[r ,c],
[r+1,c],
[r+2,c],[r+3,c+1],
[r+4,c+1]]);
case "E": return _mov(s,sf,"G",[
[r ,c ],
[r-1,c-1],
[r-2,c-2],[r-1,c-2],
[r-2,c-3]]);
case "S": return _mov(s,sf,"G",[
[r ,c],
[r-1,c],
[r-2,c],[r-3,c-1],
[r-4,c-1]]);
case "W": return _mov(s,sf,"G",[
[r ,c ],
[r+1,c+1],
[r+2,c+2],[r+1,c+2],
[r+2,c+3]]);
}
break;
case "B":
switch(d) {
case "N": return _mov(s,sf,"G",[
[r ,c],
[r+1,c],
[r+1,c-1],[r+2,c],
[r+2,c-1]]);
case "E": return _mov(s,sf,"G",[
[r ,c ],
[r-1,c-1],
[r-3,c-2],[r-2,c-2],
[r-4,c-3]]);
case "S": return _mov(s,sf,"G",[
[r ,c],
[r-1,c],
[r-1,c+1],[r-2,c],
[r-2,c+1]]);
case "W": return _mov(s,sf,"G",[
[r ,c ],
[r+1,c+1],
[r+3,c+2],[r+2,c+2],
[r+4,c+3]]);
}
break;
}
return false;
}
function movR(s,sf,r,c,d,f) { // red
switch(f) {
case "F":
switch(d) {
case "N": return _mov(s,sf,"R",[
[r ,c],
[r+1,c],[r+2,c+1],
[r+2,c],[r+3,c+1]]);
case "E": return _mov(s,sf,"R",[
[r ,c ],
[r-1,c-1],[r ,c-1],
[r-2,c-2],[r-1,c-2]]);
case "S": return _mov(s,sf,"R",[
[r ,c],
[r-1,c],[r-2,c-1],
[r-2,c],[r-3,c-1]]);
case "W": return _mov(s,sf,"R",[
[r ,c ],
[r+1,c+1],[r ,c+1],
[r+2,c+2],[r+1,c+2]]);
}
break;
case "B":
switch(d) {
case "N": return _mov(s,sf,"R",[
[r ,c],
[r ,c-1],[r+1,c],
[r+1,c-1],[r+2,c]]);
case "E": return _mov(s,sf,"R",[
[r ,c ],
[r-2,c-1],[r-1,c-1],
[r-3,c-2],[r-2,c-2]]);
case "S": return _mov(s,sf,"R",[
[r ,c],
[r ,c+1],[r-1,c],
[r-1,c+1],[r-2,c]]);
case "W": return _mov(s,sf,"R",[
[r ,c ],
[r+2,c+1],[r+1,c+1],
[r+3,c+2],[r+2,c+2]]);
}
break;
}
return false;
}
/* board detection routines */
function isfree(s,r,c) {
return r>=0 && r<10 && c>=0 && c<=r && s[r][c]=="_";
}
function ismrkd(s,r,c) {
return r>=0 && r<10 && c>=0 && c<=r && s[r][c]=="-";
}
/* board modifying routine */
function _mov(s,sf,t,rcs) {
var valid=true,rc,r,c,i;
if (rcs[0][0]<-32) {
for (i=0;i<rcs.length;i++) rc=rcs[i],s[rc[0]+64][rc[1]+64]="_",sf._++;
return;
}
for (i=0;i<rcs.length;i++) {
rc=rcs[i];
if (isfree(s,rc[0],rc[1])) s[rc[0]][rc[1]]=t,sf._--;
else {
valid=false;
break;
}
}
/*** optimization start ***/
if (sf._>3&&valid) {
for (r=0;r<10;r++) {
for (c=0;c<=r;c++) {
if (isfree(s,r,c)) { // N:
if (isfree(s,r+1,c+1))
{ if (isfree(s,r+2,c+1)) s[r+2][c+1]="-",sf._--;
if (isfree(s,r,c+1)) s[r][c+1]="-",sf._--;
if (isfree(s,r+2,c+2)&&isfree(s,r-1,c-1))
s[r+2][c+2]="-",s[r-1][c-1]="-",sf._--,sf._--;
if (ismrkd(s,r+2,c+1)||ismrkd(s,r,c+1)
||ismrkd(s,r+2,c+2)&&ismrkd(s,r-1,c-1)) s[r+1][c+1]="-",sf._--;
} // N fin; E:
if (isfree(s,r+1,c))
{ if (isfree(s,r,c-1)) s[r][c-1]="-",sf._--;
if (isfree(s,r+2,c+1)) s[r+2][c+1]="-",sf._--;
if (isfree(s,r+2,c)&&isfree(s,r-1,c))
s[r+2][c]="-",s[r-1][c]="-",sf._--,sf._--;
if (ismrkd(s,r,c-1)||ismrkd(s,r+2,c+1)
||ismrkd(s,r+2,c)&&ismrkd(s,r-1,c)) s[r+1][c]="-",sf._--;
} // E fin; S:
if (isfree(s,r-1,c-1))
{ if (isfree(s,r-2,c-1)) s[r-2][c-1]="-",sf._--;
if (isfree(s,r,c-1)) s[r][c-1]="-",sf._--;
if (ismrkd(s,r-2,c-1)||ismrkd(s,r,c-1)) s[r-1][c-1]="-",sf._--;
} // S fin; W:
if (isfree(s,r-1,c))
{ if (isfree(s,r,c+1)) s[r][c+1]="-",sf._--;
if (isfree(s,r-2,c-1)) s[r-2][c-1]="-",sf._--;
if (ismrkd(s,r,c+1)||ismrkd(s,r-2,c-1)) s[r-1][c]="-",sf._--;
} // W fin
if (ismrkd(s,r+1,c+1)||ismrkd(s,r+1,c)
||ismrkd(s,r-1,c-1)||ismrkd(s,r-1,c))
{ s[r][c]="-",sf._--;
if (r>0) { r--; r--; break; }
}
}
}
}
// free and unmarked spots are uncoverable
valid=(sf._==0);
for (r=0;r<10;r++) for (c=0;c<=r;c++) if (s[r][c]=="-") s[r][c]="_",sf._++;
}
/*** optimization end ***/
if (!valid) while (i>0) rc=rcs[--i],s[rc[0]][rc[1]]="_",sf._++;
return valid;
}
/* output helper and stats tracker */
var doc={
mc:'', // lock a colored tile for mouse input
tq:0, // timeout id to clear on user request
max:32, // limit decorated solutions in output
s:{},
t:{},
reset:function(p) {
with (this) {
if (typeof p==='undefined') s={},t={},q=[],clearTimeout(tq),setmc(),p=0;
if (t[p]==null) t[p]={t0:performance.now(),fifo:[]};
if (s[p]==null) s[p]={},t[p]["tprev"]=t[p].t0,t[p]["length"]=0;
}
},
addsol:function(a,p) {
var r=[],i;
if (typeof p==='undefined') p=0;
if (typeof a==='string') r=[a];
else for (i=0;i<a.length;i++) r.push(a[i].join(""));
with (this) {
r=r.join(""); reset(p);
if (s[p][r]==null) {
a=performance.now();
s[p][r]=a-t[p].t0/*.tprev*/;
t[p].tprev=a;
t[p].length++;
while (t[p].fifo.length>15)
t[p].fifo.shift();
t[p].fifo.push(r);
}
}
},
col:{
_: "black",
L: "limegreen",
A: "gray",
U: "purple",
W: "white",
Y: "yellow",
S: "skyblue",
P: "magenta",
B: "lightpink",
D: "mediumblue",
O: "orange",
G: "darkgreen",
R: "red",
},
newgrad:function() {
var r=[],a=[],t,i,b;
for (t in doc.col) {
a.push("div td."+t+" { background:"+doc.col[t]+";");
a.push(" background:radial-gradient("+doc.col[t]+" 50%,black);");
a.push(a[a.length-2].replace(/^div/,"div#input div"));
a.push(a[a.length-2].replace(/,black/,",#444"));
}
for (i=0;i<a.length;i++,i++) {
r.push(a[i]);
for (b in {webkit:1,khtml:1,moz:1,ms:1,o:1})
r.push(a[i+1].replace(/(radial-)/,"-"+b+"-$1"));
r.push(a[i+1]);
r.push("}");
}
return r.join("\n");
},
newdoc:function() {
var stylesheet=[
'<style type="text/css">',
"div table {",
" border-spacing:0px;",
" border:1px solid white;",
" text-align:center;",
"}",
"div td.gap {",
" color:black;",
" font-size:small;",
" width:unset;",
"}",
"div td {",
" cursor:default;",
" padding:0px;",
" width:1em;",
"}",
"div#input div table { background:#444; }",
"div#input div table,div#input p,div#output div,form button { float:left; }",
"div#input p,form button { margin:0px; margin-left:16px; }",
"div#output div table { background:black; }",
"div#output div:hover td.gap { color:white; }",
"div#output p,div#_ p { clear:both; position:relative; top:1em; }",
"form button#srd_button { float:none; }",
"form div { margin-top:.5em; }",
this.newgrad(),
"</style>",
];
document.head.innerHTML=stylesheet.join("\n");
document.body.innerHTML=
'<div id="input"></div><div id="_"></div><div id="output"></div>';
},
validate:function(a) {
a=a.toUpperCase().match(RegExp(Object.keys(this.col).join("|"),"g"));
if (a==null) a=[];
if (a.length>55) a.length=55;
while (a.length<55) a.push("_");
return a.join("");
},
setmc:function(mc) {
var f1=document.getElementById("f1");
if (f1!=null) {
if (f1["f1"]==null) f1["f1"]=f1.innerHTML;
this.mc=mc=typeof mc==='undefined'||mc==this.mc?'':mc;
f1.innerHTML=mc.length?"Click fields to set to "+this.col[mc]+".":f1.f1;
}
},
prompt:function(p) {
var d=document,id=d.getElementById.bind(d),
nm=d.getElementsByTagName.bind(d),tds,i,l;
this.reset();
this.newdoc();
this.addsol(typeof p!=='undefined'?this.validate(p):(p="",
"YYYAAYUAYLUAALLUGGDLSUGGD_SSRGDD_S_SRRWD___PPRRWW___PPP"
));
this.dump();
id("input").innerHTML=id("output").innerHTML;
id("input").appendChild(nm("p")[0]);
id("output").innerHTML="";
tds=nm("td"),l=tds.length-1;
for (i=0;i<tds.length;i++) {
if (tds[i].className=="gap") {
tds[i].innerHTML="";
tds[i].oncontextmenu=function() { return false; }
} else {
tds[l].e=tds[i]; l=i;
tds[i].onkeypress=function(e) {
var k=String.fromCharCode(e.charCode).toUpperCase();
if (k in doc.col) this.innerHTML=this.className=k;
this.e.focus();
}
tds[i].onclick=function() {
var k=Object.keys(doc.col),i=k.indexOf(this.innerHTML)+1;
this.innerHTML=this.className=doc.mc==""?k[i%k.length]:doc.mc;
}
tds[i].oncontextmenu=function() {
this.innerHTML=this.className="_";
return false;
}
}
}
nm("p")[0].innerHTML=[
"<ul><li id='f1'>Repeatedly click fields to change tile setup.</li>",
"<li>Use right click to empty a field.</li>",
"<li>Click <i>Solve!</i> to find a solution.</li>",
"<li>Alternatively use keyboard to assign tiles:</li>",
function() {
var r=["<ul><table><tr>"],t;
for (t in doc.col) {
r.push('<td class="'+t+'" onclick="doc.setmc(\''+t+'\')">');
r.push('<b title="'+doc.col[t]+': '+t+'">'+t+'</b></td>');
}
r.push("</tr></table></ul></ul><form>");
return r.join("");
}(),
'<input type="text" id="in" size="35" maxlength="255" value="'+p+'"/>',
'<button type="button" id="srd_button">Read!</button>',
'<div><button type="button" id="run_button">Solve!</button>',
'<button type="button" id="all_button">Find all!</button>',
'<button type="button" id="emt_button">Empty fields!</button>',
'<button type="button" id="rst_button">Reset!</button></div></form>',
].join("\n");
id("all_button").onclick=function() { run(0,true); }
id("run_button").onclick=function() { run(0,false); }
id("srd_button").onclick=function() { doc.prompt(id("in").value); }
id("rst_button").onclick=function() { doc.prompt(); }
id("emt_button").onclick=function() { id("in").value="";
Array.prototype.filter.call(
document.querySelectorAll("div#input div table tbody tr td"),
function(e) { return e.className!="gap"; }
).forEach(
function(e) { e.className=e.innerHTML="_"; }
);
}
},
dump:function(fin) {
if (typeof fin==='undefined') fin=true;
var d=document,e=d.createElement("div"),id=fin?"output":"_";
if (d.getElementById(id)==null) this.newdoc();
var o=d.getElementById(id),p,s,tc=0,count=[];
for (p in this.t) tc+=(count[p]=this.t[p].length);
var t=performance.now(),tt=((t-this.t[0].t0)/1E3).toFixed(2),r=[
"<p>","Showing ",Math.min(tc,this.max)," solutions ",
"out of "+count.join("+")+(count.length>1?" (="+tc+")":""),
"found. Total time spent solving: ",tt," s.",
tc>this.max?"See below for all.":"","</p>",
];
function _dump(a) {
if (typeof a!=='undefined') {
r.push("<pre>");
if (Array.isArray(a)) {
r[2]=tc,r[1]=r[4]=r[8]="",r[5]="found after ";
r=r.concat(a);
o.innerHTML="";
} else {
r[2]="all",r[8]="";
for (p in a) r=r.concat(Object.keys(a[p]));
}
r.push("</pre>");
}
e.innerHTML=r.join("\n");
while (e.firstChild) o.appendChild(e.firstChild);
}
if (!fin) _dump(this.t[p].fifo);
else {
_dump();
o.previousSibling.innerHTML="";
for (p=0,i=0,j=0;p<count.length;j=0,p++) {
for (s in this.s[p]) {
if (i==this.max) break;
e.innerHTML=prt_html(s,[
// stats shown on mouseover
((performance.now()-t)/1E3).toFixed(2)+" s 2rendr",
" "," ",((this.s[p][s])/1E3).toFixed(2)+" s 2solve",
(++i)+(p>0?" ("+(++j)+", p"+(p+1)+")":""),
]);
o.appendChild(e.firstChild);
}
}
if (tc>this.max) _dump(this.s);
}
},
}
/* run(jobnr,find_all?=false,test?=false) */
//run(102,false);
doc.prompt();
gpx2svg.py
Code
#!/usr/bin/python3
# Copyright (c) 2012-2013 Tobias Leupold <tobias.leupold@web.de>
# Copyright (c) 2014 Cmuelle8 <https://de.wikipedia.org/wiki/Spezial:E-Mail_senden/Cmuelle8>
#
# gpx2svg - Convert GPX formatted geodata to Scalable Vector Graphics (SVG)
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation in version 2 of the License.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
__version__ = '0.2.1'
import argparse
import math
import sys
from functools import reduce
from hashlib import md5
from os.path import isfile
from xml.dom.minidom import parse as parseXml
#from shutil import get_terminal_size as termsize
def debug(*objs):
#print('=== DEBUG: === ', *objs, file=sys.stderr)
return
def parseGpx(gpxFile, gpsData):
"""Get the latitude and longitude data of all track segments in a GPX file"""
# Get the XML information
try:
if gpxFile == '-':
gpx = parseXml(sys.stdin)
_id = md5(gpx.toxml().encode('utf-8')).hexdigest() # hashlib needs bytes in Python 3
else:
gpx = parseXml(gpxFile)
_id = gpxFile
except IOError as error:
print('Error while reading file: %s. Terminating.' % error, file = sys.stderr)
sys.exit(1)
except:
print('Error while parsing XML data:', file = sys.stderr)
print(sys.exc_info(), file = sys.stderr)
print('Terminating.', file = sys.stderr)
sys.exit(1)
# Iterate over all tracks, track segments and points
for track in gpx.getElementsByTagName('trk'):
for trackseg in track.getElementsByTagName('trkseg'):
trackSegData = []
for point in trackseg.getElementsByTagName('trkpt'):
trackSegData.append(
(float(point.attributes['lon'].value),
float(point.attributes['lat'].value))
)
# Leave out empty segments
if(trackSegData != []):
gpsData.append( ( trackSegData, _id ) )
for point in gpx.getElementsByTagName('wpt'):
gpsData.append( (
[ (float(point.attributes['lon'].value),
float(point.attributes['lat'].value)) ],
_id + point.getElementsByTagName('name')[0].firstChild.nodeValue )
)
gpx.unlink()
return gpsData
def mercatorProjection(gpsData, meridian = 0, r = 6378135):
"""Do the mercator projection for a GPS dataset"""
projectedData = []
for segment in gpsData:
projectedSegment = []
for coord in segment[0]:
projectedSegment.append(mercator(coord, meridian, r))
projectedData.append((projectedSegment, segment[1]))
return(projectedData)
def mercator(coord, meridian, r):
"""Calculate the Mercator projection of a coordinate pair"""
pi = 3.14159265
meridian = meridian * pi / 180.0
x = r * ((coord[0] * pi / 180.0) - meridian)
y = r * math.log(math.tan((pi / 4.0) + ((coord[1] * pi / 180.0) / 2.0)))
return((x, y))
def moveProjectedData(gpsData):
"""Move a dataset to 0,0 and return it with the resulting width and height"""
# Find the minimum and maximum x and y coordinates
minX = maxX = gpsData[0][0][0][0]
minY = maxY = gpsData[0][0][0][1]
for segment in gpsData:
for coord in segment[0]:
if coord[0] < minX:
minX = coord[0]
if coord[0] > maxX:
maxX = coord[0]
if coord[1] < minY:
minY = coord[1]
if coord[1] > maxY:
maxY = coord[1]
# Move the GPS data to 0,0
movedGpsData = []
for segment in gpsData:
movedSegment = []
for coord in segment[0]:
movedSegment.append((coord[0] - minX, coord[1] - minY))
movedGpsData.append((movedSegment, segment[1]))
# Return the moved data and it's width and height
return(movedGpsData, maxX - minX, maxY - minY)
def searchCircularSegments(gpsData):
"""Splits a GPS dataset to tracks that are circular and other tracks"""
circularSegments = []
straightSegments = []
for segment in gpsData:
if segment[0][0] == segment[0][-1]:
circularSegments.append(segment)
else:
straightSegments.append(segment)
return(circularSegments, straightSegments)
def combineSegmentPairs(gpsData, args):
"""Combine segment pairs to one bigger segment"""
combinedData = []
# Walk through the GPS data and search for segment pairs that end with the starting point of another track
while len(gpsData) > 0:
# Get one segment from the source GPS data
firstTrackData = gpsData.pop()
foundMatch = False
# Try to find a matching segment
for i in range(len(gpsData)):
if firstTrackData[1] not in args.raw_trksegs \
and firstTrackData[1] == gpsData[i][1] \
and firstTrackData[0][-1] == gpsData[i][0][0]:
# There is a matching segment, so break here
foundMatch = True
break
if foundMatch == True:
# We found a pair of segments with one shared point, so pop the data of the second
# segment from the source GPS data and create a new segment containing all data, but
# without the overlapping point
firstTrackData[0].pop()
combinedData.append((firstTrackData[0] + gpsData.pop(i)[0], firstTrackData[1]))
else:
# No segment with a shared point was found, so just append the data to the output
combinedData.append(firstTrackData)
return(searchCircularSegments(combinedData))
def combineSegments(gpsData, args):
"""Combine all segments of a GPS dataset that can be combined"""
# Search for circular segments. We can't combine them with any other segment.
circularSegments, remainingSegments = searchCircularSegments(gpsData)
# Search for segments that can be combined
while True:
# Look how many tracks we have now
segmentsBefore = len(remainingSegments)
# Search for segments that can be combined
newCircularSegments, remainingSegments = combineSegmentPairs(remainingSegments, args)
# Add newly found circular segments to processedSegments -- they can't be used anymore
circularSegments = circularSegments + newCircularSegments
if segmentsBefore == len(remainingSegments):
# combineSegmentPairs() did not reduce the number of tracks anymore,
# so we can't combine more tracks and can stop here
break
return(circularSegments + remainingSegments)
def scaleCoords(coord, height, scale):
"""Return a scaled pair of coordinates"""
return(coord[0] * scale, (coord[1] * -1 + height) * scale)
def createCoordString(segment, height, scale):
"""Create the coordinate part of an SVG path string from a GPS data segment"""
coordString = ''
for coord in segment:
x, y = scaleCoords(coord, height, scale)
coordString = coordString + ' ' + str(round(x,2)) + ',' + str(round(y,2))
return coordString
def createPathString(drawCommand, style):
"""Return a complete path element for a draw command string"""
s = ' <path '
if len(style) > 0:
s += 'style="' + style + '" '
s += 'd="' + drawCommand + '"/>\n'
return s
def createPointString(d, _id):
"""Return a complete circle element for the data in d"""
[x, y] = [float(n) for n in d.split(',')]
s = '<circle cx="' + str(x) + '" cy="' + str(y) + '" r="5"/>\n'
if _id and len(_id) > 0:
s += '<text x="' + str(x+10) + '" y="' + str(y-10) + \
'" style="text-anchor:left">' + _id + '</text>\n'
return s
def writeSvgData(gpsData, width, height, args):
"""Output the SVG data -- quick 'n' dirty, without messing around with dom stuff ;-)"""
# Calculate the scale factor we need to fit the requested maximal output size
if width <= args.max_pixels and height <= args.max_pixels:
scale = 1
elif width > height:
scale = args.max_pixels / width
else:
scale = args.max_pixels / height
# Header data
fp = args.o[0]
fp.write('''<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg xmlns="http://www.w3.org/2000/svg" version="1.1" width="%spx" height="%spx">
<defs>
<filter id="fshadow" x="0" y="0" width="200%s" height="200%s">
<feOffset result="offOut" in="SourceAlpha" dx="0" dy="0" />
<feGaussianBlur result="blurOut" in="offOut" stdDeviation="8" />
<feColorMatrix result="blurOut" in1="blurOut" type="matrix" values="-1 0 0 0 1 0 -1 0 0 1 0 0 -1 0 1 0 0 0 1 0"/>
<feBlend result="blurOut" in="blurOut" in2="blurOut" mode="multiply" />
<feBlend result="blurOut" in="blurOut" in2="blurOut" mode="multiply" />
<feBlend in="SourceGraphic" in2="blurOut" mode="normal" />
</filter>
</defs>
''' % (width * scale, height * scale, '%', '%'))
# Draw canvas
if args.canvas:
fp.write('<rect width="100%" height="100%" style="' + args.canvas + '"/>\n')
# Process all track segments and generate ids and path drawing commands for them
# First, we split the data to circular and straight segments
circularSegments, straightSegments = searchCircularSegments(gpsData)
realCircularSegments = []
singlePoints = []
segjobs = []
for segment in circularSegments:
if len(segment[0]) == 1:
# It's a single point
if not segment[1].startswith(tuple(args.drop_points.keys())):
# We want to keep single points, so add it to singlePoints
singlePoints.append(segment)
else:
# We can leave out the last point, because it's equal to the first one
segment[0].pop()
realCircularSegments.append(segment)
circularSegments = realCircularSegments
# Draw single points if requested
# Determine the remaining segment list jobs, filter empty lists
if len(circularSegments) > 0:
segjobs.append(circularSegments)
if len(straightSegments) > 0:
segjobs.append(straightSegments)
if len(singlePoints) > 0:
segjobs.append(singlePoints)
# Draw all 'circular' segments and then all 'straight' segments
for seglst in segjobs:
ret = [ ( seg[1], str( 'M' if not seglst is singlePoints else '' )
+ createCoordString(seg[0], height, scale)
+ str( 'Z' if seglst is circularSegments else '' ) )
for seg in seglst ]
debug('writeSvgData: before merge: ',
[[k, 'bytes = '+str(len(v))] for k, v in ret],
'\n' + str(ret)[0:180], '...\n\n')
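# fold the list of (id, path string) tuples into a dict mapping each id to its list of path strings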
ret = reduce(lambda s1,s2: dict(
list( [( s1[0], [s1[1]] )] if isinstance(s1, tuple) else s1.items() )
+ [( s2[0], [s2[1]] if not s2[0] in s1 else
list( [s1[1]] if isinstance(s1, tuple) else s1[s2[0]] )
+ [s2[1]]
)]
# [(s1[0], [s1[1]]), (s2[0], [s2[1]])] if isinstance(s1, tuple) and not s1[0] == s2[0]
#else [(s1[0], [s1[1], s2[1]])] if isinstance(s1, tuple)
#else list(s1.items()) +
# [(s2[0], [s2[1]] if not s2[0] in s1 else s1[s2[0]] + [s2[1]])]
),
ret) if len(ret) > 1 else dict( [(ret[0][0], [ret[0][1]])] )
debug('writeSvgData: after merge: ',
[[k, 'segments = '+str(len(v))] for k, v in ret.items()],
'\n' + str(ret)[0:180], '...\n\n')
def tosvg(t, segs):
if seglst is circularSegments:
gid = 'closedPaths_' + t
else:
gid = 'straightPaths_' + t
if seglst is singlePoints:
gid = 'singlePoints_' + t #t.rsplit('.gpx', 1)[0]
f = ' filter="url(#fshadow)"'
s = 'stroke:none;fill:black;font:caption;font-size:50px;'
else:
f = ''
s = args.styles[t]
if t in args.zip_trksegs and len(segs) > 1:
gid = 'complexPath_' + t
segs = [''.join(segs)]
r = ['<g id="' + gid + '" style="' + s + '"' + f + '>\n']
r += [ createPathString(seg, '') if not seglst is singlePoints
else createPointString(seg, t.rsplit('.gpx', 1)[1])
for seg in segs ]
r += ['</g>\n']
return r
debug('writeSvgData: before tosvg(): ',
[[k, 'segments = '+str(len(v))] for k, v in ret.items()],
'\n' + str(ret)[0:180], '...\n\n')
# render svg dom nodes
ret = {t: tosvg(t, segs) for t, segs in ret.items()}
debug('writeSvgData: after tosvg(): ',
[[k, 'segments = '+str(len(v))] for k, v in ret.items()],
'\n' + str(ret)[0:180], '...\n\n')
# yield input file order when writing
[fp.write(''.join(ret[t])) for i in args.i for t in ret if t.startswith(i)]
# Close the XML
fp.write('</svg>\n')
# Close the file if necessary
if fp != sys.stdout:
fp.close()
class CHF(argparse.ArgumentDefaultsHelpFormatter):
def __init__(self, prog):
return argparse.HelpFormatter.__init__(self, prog,
# works, but is not needed currently
#width = termsize(fallback=(80, 25)).columns - 2,
max_help_position = 8)
def _split_lines(self, text, width):
if text.count('default:') > 1:
text = text[0:text.rfind(' (default:')]
text = text.replace(' (default:', '\n(default:')
if '\n' in text:
text = text.splitlines()
else:
text = argparse.HelpFormatter._split_lines(self, text, width)
text.append('')
return text
def main():
# Setup the command line argument parser
cmdArgParser = argparse.ArgumentParser(
description = 'Convert GPX formatted geodata to Scalable Vector Graphics (SVG)',
epilog = 'gpx2svg %s - http://nasauber.de/opensource/gpx2svg/' % __version__,
formatter_class = CHF
)
# 'append' action appends to default value, but it needs replacement here, handle it below
cmdArgParser.add_argument('-i', '--input-files', dest = 'i',
metavar = 'FILE [FLGS]', nargs = '+', action = 'append',
help = 'GPX input file(s), default: STDIN\n' +
' FLGS ::= FLAG | FLAG FLGS\n' +
' FLAG ::= d | r | z\n\n' +
'd Don\'t draw single points as a circle of 10px diameter\n' +
'r Don\'t combine tracksegs that end with the starting point of another\n' +
'z Create one svg path ONLY, track segments will become subpaths'
)
cmdArgParser.add_argument('-o', '--output-file', dest = 'o', type = argparse.FileType('w'),
metavar = 'FILE', nargs = 1, default = [sys.stdout],
help = 'SVG output file, default: STDOUT\n'
)
cmdArgParser.add_argument('-c', '--canvas',
metavar = 'STYLE', nargs = '?', const = 'fill:#E0E0E0', default = None,
help = 'Enable canvas using \'fill:#E0E0E0\' or fill with STYLE if given'
)
cmdArgParser.add_argument('-m', '--max-pixels', type = int,
metavar = 'PIXELS', nargs = '?', const = 3000, default = 3000,
help = 'Maximum width or height of the SVG output in pixels'
)
cmdArgParser.add_argument('-s', '--styles', dest = 's',
metavar = 'STYLE', nargs = '+', default = ['fill:none;stroke:black;'],
help = 'Define SVG-Style(s) for gpx input file(s)'
)
addsp = "stroke-linecap:round;stroke-linejoin:round;"
cmdArgParser.add_argument('-sc', '--cycling-map-styles', dest = 's',
metavar = 'STYLE', action = "store_const", const = [
'fill:none;stroke-width:2.4;stroke:magenta;stroke-opacity:0.9;' + addsp,
'fill:none;stroke-width:4.8;stroke:orange;stroke-opacity:0.9;' + addsp,
'fill:none;stroke-width:4.8;stroke:blue;stroke-opacity:0.9;' + addsp,
'fill:none;stroke-width:4.8;stroke:red;stroke-opacity:0.9;' + addsp ],
help = 'Use styles preset: mimic "cycling overlay" used by waymarkedtrails.org'
)
cmdArgParser.add_argument('-sm', '--cycling-locator-mix-styles', dest = 's',
metavar = 'STYLE', action = "store_const", const = [
'fill:#FDFBEA;fill-rule:evenodd;stroke-width:1.2;stroke:#646464;stroke-opacity:1.0;',
'fill:none;stroke-width:1.2;stroke:#646464;stroke-opacity:1.0;' + addsp,
'fill:none;stroke-width:2.4;stroke:magenta;stroke-opacity:0.9;' + addsp,
'fill:none;stroke-width:4.8;stroke:orange;stroke-opacity:0.9;' + addsp,
'fill:none;stroke-width:4.8;stroke:blue;stroke-opacity:0.9;' + addsp,
'fill:none;stroke-width:4.8;stroke:red;stroke-opacity:0.9;' + addsp ],
help = 'Use styles preset: mash-up locator map underlay with cycle map overlay'
)
cmdArgParser.add_argument('-sl', '--locator-map-styles', dest = 's',
metavar = 'STYLE', action = "store_const", const = [
'fill:#FDFBEA;fill-rule:evenodd;stroke-width:1.2;stroke:#646464;stroke-opacity:1.0;', # area style
'fill:none;stroke-width:1.2;stroke:#646464;stroke-opacity:1.0;' + addsp, # street paths (thin black)
'fill:none;stroke-width:7.2;stroke:#E0939B;stroke-opacity:1.0;' + addsp, # contextual path (soft red)
'fill:none;stroke-width:9.6;stroke:#C12838;stroke-opacity:1.0;' + addsp ], # main path (strong red)
help = 'Use styles preset: mimic "locator map" used by wikipedia.org, for\n' + \
'4 input files: area, street, contextual, main styles are applied in order\n' + \
'3 input gpx_s: area, contextual and main path styles ..\n' + \
'two gpx files: area and main path styles ..\n' + \
'one input gpx: only area styles is applied'
)
cmdArgParser.add_argument('-st', '--trivial-krgb-map-styles', dest = 's',
metavar = 'STYLE', action = "store_const", const = [
'fill:none;stroke-width:1.0;stroke:#000000;stroke-opacity:1.0;',
'fill:none;stroke-width:4.0;stroke:#4242FF;stroke-opacity:1.0;',
'fill:none;stroke-width:3.0;stroke:#42FF42;stroke-opacity:1.0;',
'fill:none;stroke-width:2.0;stroke:#FF4242;stroke-opacity:1.0;' ],
help = 'Use styles preset: trivial style map with four colors'
)
# Get the given arguments
args = cmdArgParser.parse_args()
if not args.i:
args.i = [['-']]
args.drop_points = {}
args.raw_trksegs = {}
args.zip_trksegs = {}
args._i = []
gpsData = []
for i in [u for v in args.i for u in v]:
if i == '-' or isfile(i):
# Get latitude and longitude data from input file
gpsData = parseGpx(i, gpsData)
args._i.append(gpsData[-1][1].rsplit('.gpx',1)[0]+'.gpx')
else:
if 'd' in i:
args.drop_points[args._i[-1]] = True
if 'r' in i:
args.raw_trksegs[args._i[-1]] = True
if 'z' in i:
args.zip_trksegs[args._i[-1]] = True
args.i = args._i
del args._i
while len(args.s) > len(args.i):
del args.s[1]
args.styles = { args.i[n]: args.s[min(n,len(args.s)-1)]
for n in range(len(args.i)) }
debug('arguments parsed and processed:\n', vars(args), '\n\n')
# Check if we actually _have_ data
if gpsData == []:
print('No data to convert. Terminating.', file = sys.stderr)
sys.exit(1)
# Try to combine all track segments that can be combined if not requested otherwise
gpsData = combineSegments(gpsData, args)
# Do the Mercator projection of the GPS data
gpsData = mercatorProjection(gpsData)
# Move the projected data to the 0,0 origin of a cartesial coordinate system
# and get the raw width and height of the resulting vector data
gpsData, width, height = moveProjectedData(gpsData)
# Write the resulting SVG data to the requested output file or STDOUT
writeSvgData(gpsData, width, height, args)
if __name__ == '__main__':
main()
4shared-md5sums.php
Unfortunately, the web interface of the file host 4shared.com offers no function that displays the MD5 hashes of uploaded files. The API, however, returns them on request. To obtain an MD5 listing of the files in your own account, the files do not have to be downloaded. It suffices to:
- save the code below as 4shared-md5sums.php
- adjust login and pass to your own account credentials
- call php 4shared-md5sums.php from the command line
The login credentials are transmitted to api.4shared.com in encrypted form, just as with a login via the web interface.
See also: stackoverflow.com/questions/7848580
Code
<?php
$user_login = "login";
$user_password = "pass";
$client = new SoapClient("https://api.4shared.com/jax2/DesktopApp?wsdl",
array(
"cache_wsdl" => WSDL_CACHE_DISK,
"trace" => 1,
"exceptions" => 0
)
);
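// getAllItems() returns the account's item listing; the loops below print size, md5 and name for every entry that is not a directory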
$getAllItems = $client->getAllItems ($user_login, $user_password);
foreach ($getAllItems as $v) {
foreach ($v as $val) {
if (! $val->directory) {
printf ("%10d %s %s\n", $val->size, $val->md5, $val->name);
}
}
}
?>
The file overview of the web interface lacks filtering options; unfortunately, not even sorting by file type is possible. That makes selecting many files of a particular type cumbersome. In Firefox the following scriptlet helps: open the JavaScript window with Shift+F4 and run it. The example selects all files with the extension jpg. Since the file list is loaded asynchronously, scroll the view all the way to the bottom first; otherwise only the files in the current view are selected.
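// The class names and DOM paths below are specific to the 4shared file list markup: for every element with the jpgFileExt class, the checkbox inside the element two siblings earlier is ticked and the parent node is tagged as checked.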
var objs = document.getElementsByClassName("jpgFileExt");
for (var i = 0 ; i<objs.length; i++) {
objs[i].previousSibling.previousSibling.childNodes[1].checked = true;
objs[i].parentNode.className = objs[i].parentNode.className + " jsChecked";
}
Animating Hugin parameters
With Hugin, panoramas can be rendered as individual images. The fast preview lets you adjust the position parameters live, but there is no way to have PTBatcherGUI generate several individual jobs automatically. That would be useful whenever a certain parameter should be swept automatically with a given step size from one end of an interval to the other. The resulting single frames can then serve as the source for a video clip. The clip "pans" the otherwise static panorama image. Among others, the equal-area Lambert projections are interesting for such purposes.
If, for example, the yaw angle is to sweep over 360° in 1° steps (FRAMES set to 360 below), the following script takes a single pre-existing pto file, named Frame0000.pto, generates 359 more and hands them to PTBatcherGUI with a suitable prefix. Make sure that lossless PNG is selected as the output format in Hugin. Lossy compression only happens at the end of the processing chain, in the video encoder.
The code has so far been tested with a single use case only and may contain errors. It is published for interested Hugin users and may also merely serve as inspiration when searching for the solution to a different problem.
Code
#!/bin/bash
# hugin-animate-yaw.sh (PD) public domain, cmuelle8
# if the source files do not give a 360° view, some vals may need modification
FRAMES=750 # @25fps this will run half a minute
DEGREES=360
{ echo Frame0000.pto Frame0000
for (( i=1 ; i<$FRAMES ; i++ ))
do n=$(printf "%04d" $i)
while read ll
do if [[ "${ll:0:1}" = "i" ]]
then y=$(echo "$ll" | grep -o "y[0-9-]*\.[0-9]*")
ynew="${y:1}"
ynew="y$(bc -ql <<EOF
define int(x){auto oldscale;oldscale=scale;scale=0;x=x/1;scale=oldscale;return(x);}
define fmod(x,y){auto oldscale;oldscale=scale;scale=1000;x=x-y*int(x/y);scale=oldscale;return(x);}
fmod($ynew+($i*$DEGREES/$FRAMES)+180,360)-180
quit
EOF
)"
echo "${ll/$y/$ynew}"
else
echo "$ll"
fi
done < Frame0000.pto > Frame$n.pto
echo Frame$n.pto Frame$n
done
} | xargs PTBatcherGUI
#
# Start the PTBatcherGUI queue with "Play" and let it render the individual frames..
#
# Create the movie (set ENCODER to ffmpeg or avconv, depending on your installation)
OUTFILE=outfile.webm
ENCODER=avconv
CODEC=libvpx
RATE=25
# Video quality, see http://superuser.com/questions/556463/converting-video-to-webm-with-ffmpeg-avconv
QUALITY="-qmin 0 -qmax 50 -crf 10 -b:v 2M"
# Length of the video; a 4 second test run needs e.g. 4*RATE frames
# Use this if not all frames of the series should be encoded (preview/test)
#LENGTH="-t $((4*RATE))"
# Scaling and cropping
SCALE="w=4096:h=928"
CROP_AFTER_SCALE="4096:912:0:16"
# Use this if scaling and cropping are desired
#VF="-vf scale=$SCALE,crop=$CROP_AFTER_SCALE"
$ENCODER -r $RATE -i Frame%04d.png $VF -vcodec $CODEC $QUALITY -r $RATE $LENGTH $OUTFILE
wp-fluss-set.sh
This Bash/SQL code does its best to generate a schematic representation from river courses that are stored in OSM and can be retrieved via the Overpass API - see Vorlage:Flusssymbole and Vorlage:Fluss-Set-Doku. For the example of the Weiße Elster the program takes roughly 1 min 30 s to produce the list. The code should be regarded as a proof of concept and certainly offers room for optimization.
Detection of
- bridges, including their types and lengths
- all joining and branching waterways
- position of tributaries (left/right bank)
- lakes / reservoirs
- weirs
Notes
- the main stream of the river course
- must exist as a named relation in the OSM database - possibly adaptable by using the relation ID in fetchdata()
- roles are not evaluated
- the ordering of the ways within the relation does not matter
- it is important that all partial ways of the main stream are contained, with no gaps
- the (first) way from the spring and the (last) way to the mouth must be oriented in the direction of flow
- ways of the main stream must use the name of the relation in at least one of their name tags (name, name:de, name:en, etc.)
- e.g. all ways of the relation[name=Rhein] that use Rhein in at least one of their name tags are taken into account
- side arms and tributaries are
- detected if they share a node with the polyline/way of the river course
- not detected if they merely end on the polyline of the surrounding riverbank
- partially not detected if they flow around an island as equal-ranking arms and both arms carry the same name - all name tags are searched in this case
- in this case one of the two arms is traversed, the other is ignored
- the island symbol is set in that case - some waterways in OSM that look like a fork are, however, merely navigation channels of the same water body
- problem: if both arms carry different names and neither carries the name of the main stream, no ordering from spring to mouth can be found - example Hamburg, Norderelbe, Süderelbe, if name:short did not exist
- possible fallback solution (not implemented):
- have the Overpass API output the relation as well
- test whether the main_stream role is used on more than one member (otherwise the relation is probably not maintained)
- create the 'river' table from the main_stream members instead of checking against 'name%'
- crossing structures
- dams and other transverse structures are only taken into account if their geometry shares a node with the way of the river course
- bridges are taken into account - pass "do_crossings" as the second parameter for this
- technical
- write access to /tmp and /dev/shm is required (can be adjusted, first four variables)
- may run under Cygwin (untested) if the dependencies are installed accordingly
External dependencies
- spatialite
- spatialite-tools
- bash (4.0_alpha or later, because of coproc)
- wget
Example invocation
# Print error messages directly on the console, output goes to WE.out
./wp-fluss-set.sh "Weiße Elster" "do_crossings" >"WE.out"
# Alternatively: measure the runtime, errors go to WE.err, output to WE.out
time ./wp-fluss-set.sh "Weiße Elster" "do_crossings" 2>"WE.err" >"WE.out"
# If the data loaded from the Overpass API should be opened in [[JOSM]] or [[Merkaartor]]
./wp-fluss-set.sh "Weiße Elster" "do_crossings" "do_meta" >"WE.out"
# The last two alternatives again, each without the bridge computation
./wp-fluss-set.sh "Weiße Elster" 2>"WE.err" >"WE.out"
./wp-fluss-set.sh "Weiße Elster" "" "do_meta" >"WE.out"
# Reference times
real 6m39.358s
user 0m37.341s
sys 1m40.942s
Example output:
Code
#!/bin/bash
# wp-fluss-set.sh GPLv3 (c) 2012 cmuelle8
SLDB="/dev/shm/tmp123_$1.sqlite"
APIOUT="/dev/shm/tmp123_$1.osm"
#SLDB="/tmp/tmp123_$1.sqlite"
#APIOUT="/tmp/tmp123_$1.osm"
# todo
# - query MP data on non or source-only tagged ways
# (detect dams, lakes, etc. properly)
# - query relations of crossing ways
# (sometimes there are multiple ways in osm,
# one for each usage mode or direction, but
# only a single physical structure exists.)
# - nearby locality naming for features
# (e.g. nearby [[Crimmitschau]] | BAB 4)
# - detect state boundary crossings
# - state tracker for run-along ditches
# - put in rtree logic for query performance
# main query to pass to overpass api
opbase="http://www.overpass-api.de/api/interpreter"
opquery=$(cat <<EOF | tr -d "\n"
[timeout:9000];
relation
["name"="$1"]
["type"="waterway"];
way(r);
node(w);
way(bn);
(
way._
["waterway"]
["waterway"!="riverbank"]
->.a;
way._["water"]->.b;
way._["natural"="water"]->.c;
way._["landuse"="reservoir"]->.d;
node(w.a);
node(w.b);
node(w.c);
node(w.d);
);
out$([[ -n "$3" ]] && echo " meta");
EOF
)
# query overpass api for river and all side arm data
# $1 .. name of river
# $2 .. apiout output file
function fetch_river () {
[[ -f "$2" ]] && return 0
echo "INFO(fetch_river): Querying Overpass (result in $2).." >&2
wget -T 9000 -qO - "$opbase?data=$opquery" >"$2"
}
# query overpass api for additional data
# $1 .. name of river
# $2 .. apiout output file
# $3 .. node list
function fetch_crossing () {
[[ -f "$2" ]] && touch "$2" && return 0
{
echo -n "data=${opquery%)*}"
read m
while read n
do
qf "SELECT MbrMinY(c), MbrMinX(c), MbrMaxY(c), MbrMaxX(c) \
FROM ( \
SELECT Buffer(Extent(Geometry), 0.002) c \
FROM osm_nodes \
WHERE $m = node_id OR $n = node_id)" \
| tr '|' ',' \
| while read bbox
do
echo -n "way ($bbox)[\"bridge\"]; node(w);"
echo -n "way ($bbox)[\"tunnel\"]; node(w);"
done
m=$n
done
echo -n ")${opquery##*)}"
} < <(echo "$3") > "$2.post"
echo "INFO(fetch_crossing): Querying additional data (result in $2).." >&2
wget -T 9000 -qO - "$opbase" --post-file="$2.post" >"$2"
}
# create db and postprocess tables
# $1 .. name of river
# $2 .. osm input file
# $3 .. sqlite output file
function postproc () {
[[ -f "$3" && "$3" -nt "$2" ]] && return 0
qf ".exit" && rm -f "$3"
echo "INFO(postproc): Creating sqlite db (result in $3).." >&2
spatialite_osm_raw -d "$3" -o "$2" >&2
sql=$(cat <<EOF
DROP TABLE IF EXISTS river;
DROP TABLE IF EXISTS river_refs;
DROP TABLE IF EXISTS river_conn;
DROP TABLE IF EXISTS river_conn_mm;
DROP TABLE IF EXISTS river_conn_alt;
DROP TABLE IF EXISTS river_skel_alt;
DROP TABLE IF EXISTS river_skel;
DROP TABLE IF EXISTS river_skel_ends;
DROP TABLE IF EXISTS river_qm_cand;
DROP TABLE IF EXISTS river_qm;
DROP TABLE IF EXISTS river_az;
DROP TABLE IF EXISTS arms_refs;
DROP TABLE IF EXISTS crossing;
CREATE TABLE river AS
SELECT way_id w
FROM osm_way_tags
WHERE k LIKE 'name%' AND v="$1"
GROUP BY way_id
;
------------------------------------------------------------
-- pull all (w, sub, node) triples for river
CREATE TABLE river_refs AS
SELECT way_id w, sub s, node_id n
FROM osm_way_refs, river
ON way_id = w
;
-- pull all triples representing connected nodes
CREATE TABLE river_conn AS
SELECT *
FROM river_refs
WHERE n IN
(SELECT n FROM river_refs GROUP BY n HAVING COUNT() > 1)
;
-- look up max and min positions of w in river_conn
CREATE TABLE river_conn_mm AS
SELECT w, MAX(s) max, MIN(s) min
FROM river_conn
GROUP BY w
;
-- look up alternate paths for w in river_conn
CREATE TABLE river_conn_alt AS
SELECT l.*, r.w alt_w
FROM river_conn l, river_conn r, river_conn_mm m
ON l.n = r.n AND l.w != r.w AND
l.w = m.w AND (l.s = m.max OR l.s = m.min)
GROUP BY l.w, r.w
HAVING COUNT() > 1
;
------------------------------------------------------------
-- drop ways from river_conn connected by one node only
CREATE TABLE river_skel_alt AS
SELECT l.*
FROM river_conn l, river_conn_mm m
ON l.w = m.w AND max != min
;
-- drop alternate ways from river_skel_alt
CREATE TABLE river_skel AS
SELECT l.*
FROM river_skel_alt l, river_conn_mm m
ON l.w = m.w AND (l.s = m.max OR l.s = m.min) AND
l.w NOT IN
(SELECT MAX(r.w)
FROM river_conn_alt l, river_conn_alt r
ON l.w = r.w OR l.w = r.alt_w
GROUP BY l.w)
;
-- select ends of river_skel
CREATE TABLE river_skel_ends AS
SELECT *
FROM river_skel
GROUP BY n
HAVING COUNT() = 1
;
------------------------------------------------------------
-- compute candidates for spring and mouth
CREATE TABLE river_qm_cand AS
SELECT
r.w w,
CASE c.s
WHEN MAX(r.s) THEN MIN(r.s)
WHEN MIN(r.s) THEN MAX(r.s)
ELSE -1
END s,
CASE c.s
WHEN MAX(r.s) THEN m.max
WHEN MIN(r.s) THEN m.min
ELSE -1
END ms,
MAX(r.s) = c.s qm,
GreatCircleLength(MakeLine(n.Geometry)) d
FROM
river_refs r,
river_conn c,
river_skel_ends e,
river_conn_mm m,
osm_nodes n
ON
r.w = c.w AND
c.w = m.w AND
c.n = e.n AND
(r.s = c.s OR r.s NOT BETWEEN m.min AND m.max) AND
r.n = n.node_id
GROUP BY r.w
HAVING COUNT() > 1
;
-- choose longest segment to or from river_skel_ends
CREATE TABLE river_qm AS
SELECT l.*
FROM river_qm_cand l, river_qm_cand r
ON l.qm = r.qm
GROUP BY l.w, r.qm
HAVING l.d = MAX(r.d)
;
------------------------------------------------------------
-- construct a skeleton to be used for traversal
CREATE TABLE river_az AS
SELECT *
FROM river_skel
WHERE w NOT IN (SELECT w FROM river_qm)
UNION
SELECT l.*
FROM river_refs l, river_qm r
ON l.w = r.w AND (l.s = r.s OR l.s = r.ms)
;
------------------------------------------------------------
-- create the complement of river_refs wrt osm_way_refs
CREATE TABLE arms_refs AS
SELECT way_id w, sub s, node_id n
FROM osm_way_refs
EXCEPT
SELECT *
FROM river_refs
;
------------------------------------------------------------
-- precompute crossing linestrings
CREATE TABLE crossing AS
SELECT MakeLine(t.Geometry) g, l.way_id w
FROM osm_way_tags l, osm_way_refs r, osm_nodes t
ON (l.k='bridge' OR l.k='tunnel')
AND l.v != 'no'
AND l.way_id = r.way_id
AND r.node_id = t.node_id
GROUP BY l.way_id
;
EOF
)
qf "$sql"
# support unsplit, single way rivers
[[ $(q "COUNT()" "river_qm" "1") -lt 2 ]] && {
sql=$(cat <<EOF
DROP TABLE IF EXISTS river_qm_cand;
CREATE TABLE river_qm_cand AS
SELECT l.*
FROM river_refs l,
(SELECT w, MAX(s) max, MIN(s) min
FROM river_refs
GROUP BY w
ORDER BY max DESC
LIMIT 1) r
ON l.w = r.w AND ( s = max OR s = min )
;
DROP TABLE IF EXISTS river_qm;
CREATE TABLE river_qm AS
SELECT * FROM river_qm_cand
;
DROP TABLE IF EXISTS river_az;
CREATE TABLE river_az AS
SELECT * FROM river_qm
;
EOF
)
qf "$sql"
}
}
function q () {
qf "SELECT $1 FROM $2 t WHERE $3;"
}
function qll () {
qf "SELECT $1 FROM $2 t WHERE $3;" | tail -1
}
function qf () {
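# keep a single long-lived spatialite coprocess and stream all queries to it; the trailing SELECT 'EOF___' acts as a sentinel marking the end of each result set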
# crappy alternative, spawns a process for each query
#spatialite -list "$SLDB" "$1" | cat
[[ "$1" = ".exit" ]] && {
[[ -n "$COPROC_PID" ]] && {
echo "$1" >&7
wait $COPROC_PID
}
unset COPROC_PID
exec 7>&- 9>&-
return 0
}
[[ -z "$COPROC_PID" ]] && {
coproc spatialite -batch -list "$SLDB"
exec 7>&${COPROC[1]} 9<&${COPROC[0]}
}
#echo "INFO(qf): $1" >&2
echo "$1;SELECT 'EOF___';" >&7
read -ru 9 line
while [[ "$line" != "EOF___" ]]
do
echo "$line"
read -ru 9 line
done
}
# extract tags
function qtags () {
local ou typ ref name wiki
local name_de name_xx wiki_de wiki_xx
while IFS='|' read k v
do
case "$k" in
bridge) [[ "$v" != "no" ]] && ou="br" ;;
tunnel) [[ "$v" != "no" ]] && ou="uf" ;;
name:de) name_de="$v" ;;
name:*) name_xx="$v" ;;
wikipedia:de) wiki_de="$v" ;;
wikipedia:*) wiki_xx="$v" ;;
ref|name|wikipedia) local $k="$v" ;;
waterway|water|natural|landuse) local $k="$v" ;;
railway|highway) local $k="$v" ;;
esac
done < <(q "k, v" "osm_${1/_id/}_tags" "$1 = $2")
case "$3" in
crossing) typ="${highway:-${railway:-${waterway}}}" ;;
*) typ="${waterway:-${water:-${natural:-${landuse}}}}" ;;
esac
name="${name_de:-${name:-$name_xx}}"
wiki="${wikipedia:-${wiki_de:-$wiki_xx}}"
wiki="${wiki##*wiki/}"
[[ -z "$name" && -n "$wiki" ]] && {
name="${wiki##*:}"
name="${name##*#}"
}
wiki="[[$wiki|$name]]"
wiki="${wiki/\[|/[}"
wiki="${wiki/|\]/]}"
wiki="${wiki/\[\[\]\]/}"
#echo -e "INFO(qtags): $1\t $2\t $ou|$typ|$ref|$name|$wiki" >&2
echo "$ou|$typ|$ref|$name|$wiki"
}
function print_nodes () {
#echo "INFO(print_nodes): next way is $1, with sub nodes from $2 to $3" >&2
if [[ $2 -lt $3 ]]
then
q "node_id" "osm_way_refs" \
"$1 = way_id AND sub BETWEEN $2 AND $3 ORDER BY sub ASC"
else
q "node_id" "osm_way_refs" \
"$1 = way_id AND sub BETWEEN $3 AND $2 ORDER BY sub DESC"
fi
}
function housekeeper {
echo "INFO(housekeeper): Finishing db connection.." >&2
qf ".exit"
exit 1
}
# -----------------------------------------------
# trap signals so coproc can shutdown properly
trap "housekeeper 'terminating early'" SIGHUP SIGINT SIGTERM
# -----------------------------------------------
# fetch and postprocess raw osm import
fetch_river "$1" "$APIOUT" ; [[ "$2" = "fetch" ]] && exit
postproc "$1" "$APIOUT" "$SLDB" ; [[ "$2" = "proc" ]] && exit
{
echo "INFO: check OSM data, if more than two entries"
q "*" "river_qm_cand" "1"
echo
} >&2
# l, m, n, node_id .. node
# s, sub_a, sub_z .. position of node in way
# w, way_id .. way
# q, qll, qf .. query functions
l=0 m=0 n=0
sub_a=0 sub_z=0
w=$(q "w" "river_qm" "s=0")
[[ -z "$w" ]] && {
echo "ERROR: nothing to start with.." >&2
exit
}
# produce an ordered node list from spring to mouth
IFS='' nlist=$(
while [[ $sub_a -ge 0 ]]
do
while IFS='|' read sub_z0 n0
do
sub_z=$sub_z0 n=$n0
done < <(q "s, n" "river_az" "$w = w AND $sub_a != s")
print_nodes $w $sub_a $sub_z
sub_a=-1
while IFS='|' read w0 sub_a0
do
[[ $sub_a -ne 0 ]] && w=$w0 sub_a=$sub_a0
done < <(q "w, s" "river_az" "$w != w AND $n = n")
done | uniq
)
# -----------------------------------------------
# fetch additional data, recreate SLDB
case $2 in
do_cross*)
fetch_crossing "$1" "$APIOUT.crossing.osm" "$nlist"
postproc "$1" "$APIOUT.crossing.osm" "$SLDB"
;;
esac
# start output
echo "{{Fluss-Klappheader|$1}}"
echo "{{Fluss-Zeile|q|Quelle $1||zl=|zr=}}"
# for each node in nlist do..
[[ -n "$nlist" ]] && echo "$nlist" | {
read m
while read n
do
IFS='|' read ou ntyp ref name wiki < <(qtags "node_id" "$m")
case "$ntyp" in
weir)
echo "{{Fluss-Zeile|w|$name||zl=|zr=}}"
;;
esac
# detect (some) islands
case $(q "COUNT()" "river_conn" "$m = n") in
[3-9])
echo "{{Fluss-Zeile|i||zl=|zr=}}"
;;
esac
# detect left / right side stream
qf "$(cat <<EOF
DROP VIEW IF EXISTS lr_arm;
CREATE VIEW lr_arm AS
SELECT r.w qw, r.s qs, t.Geometry q,
(SELECT Geometry FROM osm_nodes WHERE $l = node_id) l,
(SELECT Geometry FROM osm_nodes WHERE $m = node_id) m,
(SELECT Geometry FROM osm_nodes WHERE $n = node_id) n
FROM
arms_refs l,
arms_refs r,
osm_nodes t
ON $m = l.n
AND l.w = r.w
AND ABS(l.s-1) = r.s
AND r.n = t.node_id
EOF
)"
# q .. nearest node of side stream to m
# l .. node before m
# m .. side stream mouth / merge point
# n .. node after m
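# each SIGN(...) below is the z component of a 2D cross product; the combination of signs tells on which side of the river segments l->m and m->n the side stream lies, i.e. whether it joins from the left or the right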
IFS='' lrres=$(q "qw, qs, \
SIGN( (X(n)-X(l))*(Y(m)-Y(l)) - (Y(n)-Y(l))*(X(m)-X(l)) ), \
SIGN( (X(q)-X(l))*(Y(m)-Y(l)) - (Y(q)-Y(l))*(X(m)-X(l)) ), \
SIGN( (X(q)-X(m))*(Y(n)-Y(m)) - (Y(q)-Y(m))*(X(n)-X(m)) )" \
"lr_arm" "1")
# there may be multiple streams merging at node $m
[[ -n "$lrres" ]] && echo "$lrres" | {
while IFS='|' read w s lmn lmq mnq
do
IFS='|' read ou typ ref name wiki < <(qtags "way_id" "$w")
osm="{{osm|w|$w|text=${name:-${typ}}}}"
[[ "$s" = "1" ]] && ae="ga" || ae="b"
[[ "$lmn" = "1.0" && ( "$lmq" = "1.0" && "$mnq" = "1.0" ) || \
"$lmn" != "1.0" && ! ( "$lmq" = "-1.0" && "$mnq" = "-1.0" ) ]] \
&& lr="r" || lr="l"
case "$typ" in
weir)
[[ "$ntyp" != "weir" ]] \
&& echo "{{Fluss-Zeile|w|$osm||zl=|zr=}}"
;;
dam|reservoir)
echo "{{Fluss-Zeile|st|$osm|$wiki|zl=|zr=}}"
;;
water)
echo "{{Fluss-Zeile|see|$osm|$wiki|zl=|zr=}}"
;;
river|canal|stream|ditch|drain)
#[[ "$lr" = "l" ]] && wiki="$osm|$wiki" || wiki="$wiki|$osm"
wiki="$osm|$wiki"
echo "{{Fluss-Zeile|$lr$ae|$wiki|zl=|zr=}}"
;;
riverbank)
;;
esac
done
}
# detect crossing features
case $2 in
do_cross*)
IFS='' brres=$(qf "$(cat <<EOF
SELECT r.w w,
AsText(Intersection(l.g, r.g)) i,
360*60*Distance(l1.g, Intersection(l.g, r.g)) d,
Round(GeodesicLength(r.g)) sw
FROM
(SELECT Geometry g FROM osm_nodes
WHERE $m = node_id) l1,
(SELECT MakeLine(Geometry) g FROM osm_nodes
WHERE $n = node_id OR $m = node_id) l,
crossing r
ON Crosses(l.g, r.g) > 0
ORDER BY d ASC
EOF
)"
)
;;
esac
[[ -n "$brres" ]] && echo "$brres" | {
while IFS='|' read w midpoint dist length
do
IFS='|' read ou typ ref name wiki < <(qtags "way_id" "$w" "crossing")
midpoint_lat=${midpoint%)*} midpoint_lat=${midpoint_lat##* }
midpoint_lon=${midpoint#*(} midpoint_lon=${midpoint_lon%% *}
function print_unhandled () {
echo
echo " -= unhandled crossing feature =-"
echo " way........: {{osm|w|$w}}; length:${length%.*}m; ou:$ou"
echo " /crosses-at: NS=$midpoint_lat|EW=$midpoint_lon"
echo " |distance..: ${dist%.*}m"
echo ' \near-rnode: {{osm|n|'$m'}}'
tags="$(
[[ -n "$typ" ]] && echo " type:$typ"
[[ -n "$name" ]] && echo " name:$name"
[[ -n "$ref" ]] && echo " ref.:$ref"
[[ -n "$wiki" ]] && echo " wiki:$wiki"
)"
[[ -n "$tags" ]] && echo $tags || echo " NO_TAGS on $w"
}
function print_ref () {
while read sr
do
case "$sr" in
A*) link="|Bundesautobahn ${sr#A }" ;;
B*) link="|Bundesstraße ${sr#B }" ;;
E*) link="|Europastraße ${sr#E }" ;;
*) link="" ;;
esac
pre="${pre:-${link:+[[${link#|}${link%% *}]]}}"
desc="$desc{{RSIGN|DE|${sr/ /|}${link}}}"
done < <( \
sed -e 's| */ *|;|g;' -e 's| *; *|;|g' | tr ';' '\n' | \
sed -e 's|^|K/|;' -e 's/K.St/StBy/g;' -e 's/K.S/StSn/g;' \
-e 's/K.\([EABLSK]\) /\1 /g;' | \
grep '[EABLSK].* [0-9]\+')
desc="${desc:+${pre:-"Straße"}}${desc:+" "}${desc}"
echo "$desc"
}
# use a link here only if osm-way has a wikipedia-tag
[[ "${wiki}" = "${wiki/|/}" ]] && wiki=""
cname="" cregion="DE"
rdesc="" rsymb="" desc="" symb=""
pre=""
# detect
case "$typ" in
motorway*)
cname="${cname:-${name:-Autobahnbrücke $ref}}"
rsymb="${rsymb:-Bridge pictogram.svg}"
read desc < <(print_ref <<<"$ref")
;;
trunk*|primary*)
cname="${cname:-${name:-gr. Kfz-Brücke $ref}}"
rsymb="${rsymb:-Sinnbild LKW mit Anhänger.svg}"
read desc < <(print_ref <<<"$ref")
;;
secondary*|tertiary*|unclassified)
cname="${cname:-${name:-Kfz-Brücke $ref}}"
rsymb="${rsymb:-Sinnbild PKW mit Anhänger.svg}"
read desc < <(print_ref <<<"$ref")
;;
residential|living_street)
cname="${name:-kl. Kfz-Brücke}"
symb="Sinnbild Innerorts.svg"
;;
service)
cname="${name:-Versorgungsweg}"
#rsymb="Sinnbild Pannenhilfe.svg"
;;
track)
cname="${name:-Wirtschaftsweg}"
rsymb="Sinnbild Traktor.svg"
;;
rail|tram)
cname="${name:-Eisenbahnbrücke}"
rsymb="Sinnbild Eisenbahn.svg"
;;
path|footway|cycleway|pedestrian)
cname="${name:-Fußgängerbrücke}"
rsymb="Sinnbild Wanderer.svg"
;;
construction)
cname="${name:-(Brücke im Bau)}"
rsymb="Piktogramm Baustelle.svg"
;;
river|canal|stream|ditch|drain)
cname="${name:-${typ}}"
;;
*)
#symb="Autoroute icone.svg" #symb="BSicon TRAM1.svg"
#symb="Sinnbild LKW.svg" #symb="Sinnbild PKW.svg"
#symb="Zeichen 331.svg" #symb="Zeichen 330.svg" #symb="Zeichen 240.svg"
print_unhandled >&2
;;
esac
[[ -n "$cname" ]] && {
osm="{{osm|w|$w|text=$cname|tooltip=Länge ca. ${length%.*}m}}"
rdesc="${wiki:-$osm}"
cname="$cname über die $1"
coord="<small>"
coord="${coord}{{Coordinate|NS=$midpoint_lat|EW=$midpoint_lon"
coord="${coord}|type=landmark|region=$cregion|text=ICON2|name=$cname}}"
coord="${coord}</small>"
echo "{{Fluss-Zeile|$ou|$rdesc|$desc|zr=$rsymb|zl=$symb}}"
}
done
}
l=$m
m=$n
done
}
# finish output
tributary_of=$(qf "$(cat <<EOF
SELECT l.v FROM osm_way_tags l, osm_way_refs r
ON l.k LIKE 'name%'
AND l.way_id = r.way_id
AND r.way_id NOT IN (SELECT w FROM river)
AND r.node_id = $(tail -1 <<<$nlist)
ORDER BY k ASC
;
EOF
)" | head -1)
echo "{{Fluss-Klappfooter${tributary_of:+"|[[$tributary_of]]"}}}"
# cleanup
# rm -f $SLDB $APIOUT