野狐禅
Posted 2008-1-16 21:19
Quoting cnywj (2008-1-16 21:12):
"To use this in practice, replace the
if (buy-side funds > sell volume * current price)
{ price += 0.01 }
with P = K - cQ, and you can basically apply this model ..."

However much is sold, that much is bought; the total is always balanced. It is the willingness to buy and the willingness to sell that differ. Otherwise, how would money ever evaporate?
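To make the two rules being compared concrete, here is a minimal sketch; every name and constant below is an illustrative assumption, not anyone's actual model:

public class PriceRuleSketch
{
//threshold tick rule: nudge the price up one tick while buy-side funds exceed the value on offer
static double tickRule(double price, double buyFunds, double sellVolume)
{
if (buyFunds > sellVolume * price) price += 0.01;
return price;
}
//linear demand rule P = K - c*Q: price falls linearly with the quantity Q on offer
static double linearRule(double K, double c, double Q)
{
return K - c*Q;
}
public static void main(String [] argv)
{
System.out.println(tickRule(10.00, 120000, 10000)); //10.01: excess demand moves the price up one tick
System.out.println(linearRule(12.0, 0.0002, 10000)); //10.0: the same price read off the linear model
}
}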
野狐禅
Posted 2008-1-16 21:21
Quoting cnywj (2008-1-16 21:16):
"But it feels like this net really can sometimes pass a cat off as a tiger."

A tiger and a cat were never that different to begin with, were they?
cnywj
Posted 2008-1-16 21:24
Quoting 野狐禅 (2008-1-16 21:21):
"A tiger and a cat were never that different to begin with, were they?"

True, which is why I want to learn to draw the cat.
Honestly, the programming work for building the hidden layer and all the rest makes my head spin; gnawing on this dry bun in my spare time leaves my jaw aching.
cnywj
Posted 2008-1-16 21:28
Old monk, do you have a minimal piece of cat-drawing code you could post for me to chew on for a month or two?
野狐禅
Posted 2008-1-16 23:13
Quoting cnywj (2008-1-16 21:28):
"Old monk, do you have a minimal piece of cat-drawing code you could post for me to chew on for a month or two?"
package ann;
/**
* Copyright: Copyright (c) 2004
* @version 1.0
*/
import java.io.*;
import java.util.*;
/**
* Three-Layer Feed-Forward Neural Network
*/
public class BP
{
/**
* control parameters
*/
public static final int REGRESSION = 0;
public static final int CLASSIFY = 1;
/**
* neural network structure control parameters
*/
public int m_iInputNodes= 0;
public int m_iHiddenNodes = 0;
public int m_iOutputNodes = 0;
public int m_iTaskType = 0;
/**
* neural network data structure
*/
public double [] mdInput = null; //input layer
public double [] mdHidden = null; //hidden layer
public double [] mdOutput = null; //output layer
public double [] mdIB = null; //input-to-hidden bias (one per hidden node)
public double [] mdHB = null; //hidden-to-output bias (one per output node)
public double [][] mdIH = null; //input-hidden layer weights
public double [][] mdHO = null; //hidden-output layer weights
/**
* weights
*/
int m_iNumWeights = 0;
double [] mdWeights = null;
/**
* Constructor
*/
public BP() {}
/**
* duplicate a BP
*/
public void copy(BP bp)
{
int i;
//initialize neural network and set memory
initialize(bp.m_iInputNodes,
bp.m_iHiddenNodes,
bp.m_iOutputNodes,
bp.m_iTaskType);
//copy weights
System.arraycopy(bp.mdIB, 0, mdIB, 0, m_iHiddenNodes);
for (i=0; i<m_iHiddenNodes; i++)
System.arraycopy(bp.mdIH[i], 0, mdIH[i], 0, m_iInputNodes);
System.arraycopy(bp.mdHB, 0, mdHB, 0, m_iOutputNodes);
for (i=0; i<m_iOutputNodes; i++)
System.arraycopy(bp.mdHO[i], 0, mdHO[i], 0, m_iHiddenNodes);
}
/**
* Initialize Neural Network
*/
public void initialize(int numInputNodes,
int numHiddenNodes,
int numOutputNodes,
int taskType)
{
int i,j;
//neural network structure parameters
m_iInputNodes = numInputNodes;
m_iHiddenNodes = numHiddenNodes;
m_iOutputNodes = numOutputNodes;
m_iTaskType = taskType;
//neural network i/o vectors
mdInput = new double[m_iInputNodes];
mdHidden = new double[m_iHiddenNodes];
mdOutput = new double[m_iOutputNodes];
//neural network bias and weights
mdIB = new double[m_iHiddenNodes];
mdIH = new double[m_iHiddenNodes][m_iInputNodes];
mdHB = new double[m_iOutputNodes];
mdHO = new double[m_iOutputNodes][m_iHiddenNodes];
//total weights
m_iNumWeights = m_iHiddenNodes*(1 + m_iInputNodes)
+ m_iOutputNodes*(1 + m_iHiddenNodes);
mdWeights = new double[m_iNumWeights]; //flat weight-exchange buffer
//initialize weights at random in [0,1)
for (i=0; i<m_iHiddenNodes; i++)
{
mdIB[i] = Math.random();
for (j=0; j<m_iInputNodes; j++) mdIH[i][j] = Math.random();
}
for (i=0; i<m_iOutputNodes; i++)
{
mdHB[i] = Math.random();
for (j=0; j<m_iHiddenNodes; j++) mdHO[i][j] = Math.random();
}
}
/**
* Output Neural Network Weights
*/
public double [] getWeights()
{
int i, iSize;
iSize = 0;
System.arraycopy(mdIB, 0, mdWeights, iSize, m_iHiddenNodes);
iSize += m_iHiddenNodes;
for (i=0; i<m_iHiddenNodes; i++)
{
System.arraycopy(mdIH[i], 0, mdWeights, iSize, m_iInputNodes);
iSize += m_iInputNodes;
}
System.arraycopy(mdHB, 0, mdWeights, iSize, m_iOutputNodes);
iSize += m_iOutputNodes;
for (i=0; i<m_iOutputNodes; i++)
{
System.arraycopy(mdHO[i], 0, mdWeights, iSize, m_iHiddenNodes);
iSize += m_iHiddenNodes;
}
return mdWeights;
}
/**
* Insert Neural Network Weights
*/
public void setWeights(double [] weights)
{
int i,k;
double d;
//input-hidden layer
int iSize = 0;
System.arraycopy(weights, iSize, mdIB, 0, m_iHiddenNodes);
iSize += m_iHiddenNodes;
for (i=0; i<m_iHiddenNodes; i++)
{
System.arraycopy(weights, iSize, mdIH[i], 0, m_iInputNodes);
iSize += m_iInputNodes;
}
//hidden-output layer :: bias
System.arraycopy(weights, iSize, mdHB, 0, m_iOutputNodes);
iSize += m_iOutputNodes;
//output layer weights
for (i=0; i<m_iOutputNodes; i++)
{
//scaling on output layer for classification task
if (m_iTaskType == CLASSIFY)
{
d = 0;
for (k=iSize; k<iSize+m_iHiddenNodes; k++) d += weights[k]*weights[k];
d = 1.0/Math.sqrt(d);
for (k=iSize; k<iSize+m_iHiddenNodes; k++) weights[k] *= d;
}
System.arraycopy(weights, iSize, mdHO[i], 0, m_iHiddenNodes);
iSize += m_iHiddenNodes;
}
}
/**
* Neural Network Output
*/
public double [] output(double [] instance)
{
int i,j;
double d;
//copy input variables
System.arraycopy(instance, 0, mdInput, 0, m_iInputNodes);
//input layer to hidden layer
for (i=0; i<m_iHiddenNodes; i++)
{
d = mdIB[i];
for (j=0; j<m_iInputNodes; j++) d += mdIH[i][j]*mdInput[j];
mdHidden[i] = 1.0/(1.0+Math.exp(-d)); //sigmoid activation
}
//hidden layer to output layer
for (i=0; i<m_iOutputNodes; i++)
{
d = mdHB[i];
for (j=0; j<m_iHiddenNodes; j++) d += mdHO[i][j]*mdHidden[j];
mdOutput[i] = d; //linear output (rescaled below for classification)
}
//modify for classification problem
if (m_iTaskType == CLASSIFY)
for (i=0; i<m_iOutputNodes; i++)
mdOutput[i] = 1.0/(1.0+Math.exp(-mdOutput[i]));
return mdOutput;
}
/**
* Neural Network Structure Redundancy (number of unused links)
*/
public double redundant()
{
int i,j;
/*
* redundancy in regression neural network is
* measured on the average value of output weights
*/
double dR = 0;
for (i=0; i<m_iOutputNodes; i++)
for (j=0; j<m_iHiddenNodes; j++)
dR += Math.abs(mdHO[i][j])/(1+Math.abs(mdHO[i][j]));
dR /= m_iOutputNodes*m_iHiddenNodes;
return dR;
}
/**
* Save Neural Network Structure
*/
public boolean save(String fileName)
{
int i,j;
try
{
FileOutputStream ouF = new FileOutputStream(new File(fileName));
PrintStream ouP = new PrintStream(ouF);
System.out.println("<BP::save> " + fileName);
//control parameters
ouP.println(";BP Neural Network "+ (new Date()).toString());
ouP.println(m_iInputNodes+"\t;InputNodes");
ouP.println(m_iHiddenNodes +"\t;Hidden Nodes");
ouP.println(m_iOutputNodes +"\t;Output Nodes");
ouP.println(m_iTaskType +"\t;0: Regression, 1: Classification");
//input-hidden layer weights
ouP.println(";\n;Input-Hidden Layer Weights");
for (i=0; i<m_iHiddenNodes; i++)
{
ouP.print(i +", "+ mdIB[i]);
for (j=0; j<m_iInputNodes; j++) ouP.print(", "+ mdIH[i][j]);
ouP.println();
}
//hidden-output layer weights
ouP.println(";\n;Hidden-Output Layer Weights");
for (i=0; i<m_iOutputNodes; i++)
{
ouP.print(i +", "+ mdHB[i]);
for (j=0; j<m_iHiddenNodes; j++) ouP.print(", "+ mdHO[i][j]);
ouP.println();
}
ouP.close();
ouF.close();
}
catch (Exception ex)
{
ex.printStackTrace();
System.out.println("<BP::save> error on saving "+ fileName);
return false;
}
return true;
}
/**
* Read Neural Network Structure
*/
public boolean read(String fileName)
{
int i;
String sItem, sLine;
StringTokenizer ST;
try
{
System.out.println("<BP::read> "+ fileName);
BufferedReader inB = new BufferedReader(new FileReader(fileName));
//read control parameters
int iNumInputNodes= 0;
int iNumHiddenNodes = 0;
int iNumOutputNodes = 0;
int iTaskType = 0;
int iCount = 0;
while (iCount < 4 && (sLine = inB.readLine()) != null)
{
sLine = sLine.trim();
if (sLine.length() == 0 || sLine.charAt(0) == ';') continue;
ST = new StringTokenizer(sLine, " ,;\t");
sItem = ST.nextToken();
switch(iCount)
{
case 0: iNumInputNodes= Integer.parseInt(sItem); break;
case 1: iNumHiddenNodes = Integer.parseInt(sItem); break;
case 2: iNumOutputNodes = Integer.parseInt(sItem); break;
case 3: iTaskType = Integer.parseInt(sItem); break;
}
iCount ++;
}
//set neural network
initialize(iNumInputNodes, iNumHiddenNodes, iNumOutputNodes, iTaskType);
//read input-hidden layer weights
iCount = 0;
while (iCount < m_iHiddenNodes && (sLine = inB.readLine()) != null)
{
sLine = sLine.trim();
if (sLine.length() == 0 || sLine.charAt(0) == ';') continue;
ST = new StringTokenizer(sLine, " ,\t");
sItem = ST.nextToken(); //skip index
//parse the bias for this hidden node
mdIB[iCount] = Double.parseDouble(ST.nextToken());
//parse the input-to-hidden weights
for (i=0; i<m_iInputNodes; i++)
mdIH[iCount][i] = Double.parseDouble(ST.nextToken());
iCount ++;
}
//read hidden-output layer weights
iCount = 0;
while (iCount < m_iOutputNodes && (sLine = inB.readLine()) != null)
{
sLine = sLine.trim();
if (sLine.length() == 0 || sLine.charAt(0) == ';') continue;
ST = new StringTokenizer(sLine, " ,\t");
sItem = ST.nextToken(); //skip index
//parse the bias for this output node
mdHB[iCount] = Double.parseDouble(ST.nextToken());
//parse the hidden-to-output weights
for (i=0; i<m_iHiddenNodes; i++)
mdHO[iCount][i] = Double.parseDouble(ST.nextToken());
iCount ++;
}
inB.close();
}
catch (Exception ex)
{
ex.printStackTrace();
System.out.println("<BP::read> error on reading "+ fileName);
return false;
}
return true;
}
/**
* TEST PROGRAM
*/
public static void main(String [] argv)
{
BP bp = new BP();
bp.initialize(8, 5, 2, 0);
bp.save("bp.bp");
bp.read("bp.bp");
bp.save("bp.bp.bak");
}
/** End of BP Class */
}
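For anyone who wants to run the listing: a minimal driver, not part of the original post, that builds a small network and queries it with made-up inputs. Note that despite the class name, the listing contains only the forward pass plus weight save/load; the backpropagation training step is not included.

import java.util.Arrays;
import ann.BP;

public class BPDemo
{
public static void main(String [] argv)
{
BP bp = new BP();
//8 inputs, 5 hidden nodes, 2 outputs, regression task
bp.initialize(8, 5, 2, BP.REGRESSION);
//forward pass through the randomly initialized network
double [] x = {0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8};
System.out.println(Arrays.toString(bp.output(x)));
//round-trip the weights through the flat-vector interface; the output is unchanged
bp.setWeights(bp.getWeights());
System.out.println(Arrays.toString(bp.output(x)));
}
}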
cnywj
Posted 2008-1-17 00:30
Thanks, old monk.
cnywj
Posted 2008-1-17 01:07
Brother 无聊, you have been training behind closed doors lately; picked up any insights?
无聊嘛
Posted 2008-1-17 10:36
Posting something for your reference.
lmt101
Posted 2008-1-17 11:22
I am bullish on the Chinese stock market for three years.
野狐禅
Posted 2008-1-17 11:26
Quoting lmt101 (2008-1-17 11:22):
"I am bullish on the Chinese stock market for three years."

Is this year the third year, or do the three years start from this year?
野狐禅
Posted 2008-1-17 20:52
1/17/2008:
Day Price Change
0 325.56 -2.60%
1 334.25 -2.54%
2 342.98 0.12%
3 342.58 1.18%
4 338.59 -0.06%
jm0112
Posted 2008-1-17 21:03
Old monk, has the short-term direction turned down?
无聊嘛
Posted 2008-1-17 22:51
Quoting liza012 (2008-1-9 08:52):
"If wave 4 is not finished, then its pattern must be down, up, down."

Ha, you counted it right.
cnywj
Posted 2008-1-18 00:06
Quoting 无聊嘛 (2008-1-17 10:36):
"Posting something for your reference."

Thanks, brother 无聊 :)
cnywj
Posted 2008-1-18 00:09
A big chunk of flesh got knocked off, but the trend still does not seem to be pointing down.
野狐禅
Posted 2008-1-18 07:31
Quoting cnywj (2008-1-18 00:09):
"A big chunk of flesh got knocked off, but the trend still does not seem to be pointing down."

Yours truly said, the day before yesterday, that both indices should be sold yesterday.
iloveinru
Posted 2008-1-18 09:04
So now it gets announced after the fact?
野狐禅
Posted 2008-1-18 21:42
1/18/2008: Over the past month or so, the market average has done better than the Shanghai Composite. The combined Shanghai-Shenzhen composite tracks the market more closely.
Day Price Change
0 328.47 1.17%
1 324.67 -2.67%
2 333.59 -2.43%
3 341.90 0.16%
4 341.34 1.15%
cnywj
Posted 2008-1-18 22:45
Looks like if there is another sharp drop today, the trend on my end will turn down.
野狐禅
Posted 2008-1-19 01:43
Quoting cnywj (2008-1-18 22:45):
"Looks like if there is another sharp drop today, the trend on my end will turn down."

From where I sit, the old bull has just tripped; whether it actually falls is still hard to say.